From 364268c7182939097102a53a95d71fdbb1ff3606 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 30 Nov 2020 09:09:16 +1300 Subject: [PATCH 01/28] WIP --- common/config/rush/pnpm-lock.yaml | 109 ++++++++++++++++++++++++------ dataplane.code-workspace | 12 ++++ rush.json | 15 ++++ 3 files changed, 116 insertions(+), 20 deletions(-) diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index f17bdf6eb810..b2fd9a8707a4 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -47,6 +47,9 @@ dependencies: '@rush-temp/storage-file-share': 'file:projects/storage-file-share.tgz' '@rush-temp/storage-internal-avro': 'file:projects/storage-internal-avro.tgz' '@rush-temp/storage-queue': 'file:projects/storage-queue.tgz' + '@rush-temp/synapse-accesscontrol': 'file:projects/synapse-accesscontrol.tgz' + '@rush-temp/synapse-artifacts': 'file:projects/synapse-artifacts.tgz' + '@rush-temp/synapse-spark': 'file:projects/synapse-spark.tgz' '@rush-temp/template': 'file:projects/template.tgz' '@rush-temp/test-utils-perfstress': 'file:projects/test-utils-perfstress.tgz' '@rush-temp/test-utils-recorder': 'file:projects/test-utils-recorder.tgz' @@ -1876,6 +1879,12 @@ packages: node: '>=0.10.0' resolution: integrity: sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8= + /builtin-modules/2.0.0: + dev: false + engines: + node: '>=4' + resolution: + integrity: sha512-3U5kUA5VPsRUA3nofm/BXX7GVHKfxz0hOBAPxXrIvHzlDRkQVqEn6yi8QJegxl4LzOHLdvb7XF5dVawa/VVYBg== /builtin-modules/3.1.0: dev: false engines: @@ -7195,6 +7204,15 @@ packages: dev: false resolution: integrity: sha512-xRkB+W/m1KLIzPUmG0ofvR+CPNcvuCuNdjVBVS7ALKSxr3EDhnzNceGkGi1m8MToSli13AzKFYH4ie9w3I5L3g== + /rollup-plugin-node-resolve/3.4.0: + dependencies: + builtin-modules: 2.0.0 + is-module: 1.0.0 + resolve: 1.17.0 + deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-node-resolve. 
+ dev: false + resolution: + integrity: sha512-PJcd85dxfSBWih84ozRtBkB731OjXk0KnzN0oGp7WOWcarAFkVa71cV5hTJg2qpVsV2U8EUwrzHP3tvy9vS3qg== /rollup-plugin-shim/1.0.0: dev: false resolution: @@ -9021,7 +9039,7 @@ packages: dev: false name: '@rush-temp/ai-anomaly-detector' resolution: - integrity: sha512-aaUKAVShh4trVKgoKcP+d37S1xKC0UrnVAe5A0Yx5asTn8v0KyubMDmFNQE6I8EObiHHNP9Hv7EmdY07vKIokw== + integrity: sha512-im0Az15EfcttIX5T61sj0aRo6KT6NP4dQeWJ3TQFJUR33Oco7W15VRMv+UXT1Eqwe3a/z3rqIvdBPZF5JYsA5A== tarball: 'file:projects/ai-anomaly-detector.tgz' version: 0.0.0 'file:projects/ai-form-recognizer.tgz': @@ -9082,7 +9100,7 @@ packages: dev: false name: '@rush-temp/ai-form-recognizer' resolution: - integrity: sha512-itk870Qe396YuKoaQI1kkeHsD4S1oGxIgPnwm6NDQjkc/EVGR/ELp+HxNGUdsUiefo6n4w1B2ouQoREOa4uA0A== + integrity: sha512-ggLKyydURwTpuEH3YqnsqWowerF8nOCZnEpH+DjH0i3ryU67fJ+cLHttIIQ/z9lqRrZlv0djJ9cVEUGBHuzaUw== tarball: 'file:projects/ai-form-recognizer.tgz' version: 0.0.0 'file:projects/ai-metrics-advisor.tgz': @@ -9127,7 +9145,7 @@ packages: dev: false name: '@rush-temp/ai-metrics-advisor' resolution: - integrity: sha512-Lcl1NARcCru6Y/gAUpz/URvOQGpyLnVNf33oLHmdPtV3Fp9TQzMl9gmymaipU6MfL5rsI7Il3mER8fjemzeThQ== + integrity: sha512-BlG5txD2yfImdOEmTLfpTGoVn5hhjzqQD3ZZOVR/ZYAX9erIPCa3dmycJ35dE3q2LtLqR0zMmMuEvGHji4Uy6g== tarball: 'file:projects/ai-metrics-advisor.tgz' version: 0.0.0 'file:projects/ai-text-analytics.tgz': @@ -9190,7 +9208,7 @@ packages: dev: false name: '@rush-temp/ai-text-analytics' resolution: - integrity: sha512-w7v5zDItgaPNzOOL3lkjQpSAJgp9Ex86OQckrThCfwYicUvQ2rANqacVz5d6KseFRP1OyenPXM1KaFyY3a1KqQ== + integrity: sha512-ynAlVrS3nk6apbPNyP3i/xi5+IOi3yfae4LNT6gqoxQ4vxRpQEF79d6qi3ujYZf9UzwLn3Vksdr0WQM26LYVBQ== tarball: 'file:projects/ai-text-analytics.tgz' version: 0.0.0 'file:projects/app-configuration.tgz': @@ -9250,7 +9268,7 @@ packages: dev: false name: '@rush-temp/app-configuration' resolution: - integrity: 
sha512-g4Ch0EtyNeX5u75WQoSVQPcelPMMMQbRUSl9sICbmS2NSddOvicHzvDWB31qCsPccRrOZKZrgVIOA4xst7p7vQ== + integrity: sha512-JXMfo451lyxj07kUIQBSAow4FgLaBi1TtCHcm2VBsKvvT0JuFrNvvTIUwh+aKbUvkn/ZDDYJqQIkZc9vzWj2/A== tarball: 'file:projects/app-configuration.tgz' version: 0.0.0 'file:projects/communication-administration.tgz': @@ -9556,7 +9574,7 @@ packages: dev: false name: '@rush-temp/core-amqp' resolution: - integrity: sha512-Nk2z5Ms2pZsaLcHWcB13/dcFCR3RS5/OWFSMM0hh/heqOxKhog9RJXWnP1DGXhLLp9vibitt7XHBDXZkajra3g== + integrity: sha512-x6mKN9gSowCFmjRbwapUXG+7Q8SXgIj04M9Uxzz2em44VtCvLxDt1xQEoVM735J1ggwtEKJeNU3NwGO125NfEA== tarball: 'file:projects/core-amqp.tgz' version: 0.0.0 'file:projects/core-arm.tgz': @@ -10215,7 +10233,7 @@ packages: dev: false name: '@rush-temp/digital-twins-core' resolution: - integrity: sha512-XD/6kUVKso0pFhmaqLaprwLEOFFS6T7rFoeeXG6RCRmi8/zL2+hKxjWawK2AoY4wuBx/xZs+OaZfjm39OtWkBA== + integrity: sha512-7PFSyOl5EOrUMDiG6K1z7HjhGiPptxj8JXO1fwlMu8KhSpd3EcvgWcVyn2K7cadIakIF5bh74divfXR2kI7+aQ== tarball: 'file:projects/digital-twins-core.tgz' version: 0.0.0 'file:projects/eslint-plugin-azure-sdk.tgz': @@ -10292,6 +10310,7 @@ packages: eslint-plugin-no-only-tests: 2.4.0 eslint-plugin-promise: 4.2.1 esm: 3.2.25 + https-proxy-agent: 5.0.0 is-buffer: 2.0.4 jssha: 3.1.2 karma: 5.1.1 @@ -10326,7 +10345,7 @@ packages: dev: false name: '@rush-temp/event-hubs' resolution: - integrity: sha512-ULvDOj0ReZbE5TXzESvryoDyHGbatat+qiW4fnfMvLPdvpSvuWBg4GmOIgj8kkzPkV4HoJmUE3BTSoANzGRSXA== + integrity: sha512-ilsDcySQmnvO8QTZ0WnLoxJvlG91FhasAa5cxQNvFZTOqM8E38n4AQEXx5n2RqELi09DETy8Y8y2M2ueC9c9Gg== tarball: 'file:projects/event-hubs.tgz' version: 0.0.0 'file:projects/event-processor-host.tgz': @@ -10508,7 +10527,7 @@ packages: dev: false name: '@rush-temp/eventhubs-checkpointstore-blob' resolution: - integrity: sha512-zKXMbZZ/qO6r/TdAW8dNOmT9jU/cxROlE2uqTzTJ7ydgx7Ii5SHLfYVLVal6rpk/xEhgrTfhLQ8kCeOhhVBWPw== + integrity: 
sha512-GOyNtbMLJeIBYIupHwtltpdM5U8SDMfLorK6vThYBxPnfsHpYrmsruOl331/w50SFWouhKbsROY39kumYvozFQ== tarball: 'file:projects/eventhubs-checkpointstore-blob.tgz' version: 0.0.0 'file:projects/identity.tgz': @@ -10635,7 +10654,7 @@ packages: dev: false name: '@rush-temp/keyvault-admin' resolution: - integrity: sha512-zlghZSc7DbMyVwjv4Z5aT2ZXVp6PqMW/+Eg2WoStevLDkIkWEL3+XYQjv6R3qRJioCsMOXeC5oZ0AbdNVXssjA== + integrity: sha512-6mOQjAHnCOHmlxRR7Ku4Vz07pTlxLal4ePFqKWUDUunw9nEqCwDOsJSBS6L3uVc+6dM1d0LfhRQE9Em0aTTXjw== tarball: 'file:projects/keyvault-admin.tgz' version: 0.0.0 'file:projects/keyvault-certificates.tgz': @@ -10701,7 +10720,7 @@ packages: dev: false name: '@rush-temp/keyvault-certificates' resolution: - integrity: sha512-VUN+2FwAABk8mJ6VEsrqLPtID7DUhz9LYuJTwaOmnGwttoSVb2FfAtF63kz9mgODd0kAngw4giRKWKwp58Bbsw== + integrity: sha512-PWtyIwmJ6+ef0crYzvlR32v84QTGm4L9S02iJ7EaiJ9GwEEJBEiufpBqM5O40BSXbGEoEAMZEPP+jn9qnFP4Eg== tarball: 'file:projects/keyvault-certificates.tgz' version: 0.0.0 'file:projects/keyvault-common.tgz': @@ -10779,7 +10798,7 @@ packages: dev: false name: '@rush-temp/keyvault-keys' resolution: - integrity: sha512-Na+m4N9KIMQN764GcMVCg8eFyCLezBxc7J6X1g0/Oxd+i9E4Wnr0cVbXhH36QmaxdSNR4JbgeP2wE/Swxio8NQ== + integrity: sha512-JjUBTPWmDNrW4rlQv7bgDXjMmaTdp5Obg0vQaQC9SciD+GEzYSdnnQPqR3Pn7a6ODxtW3fi8OBspFzy/wPHorA== tarball: 'file:projects/keyvault-keys.tgz' version: 0.0.0 'file:projects/keyvault-secrets.tgz': @@ -10845,7 +10864,7 @@ packages: dev: false name: '@rush-temp/keyvault-secrets' resolution: - integrity: sha512-z7+hE+5gm4X/0ZZZ96PtdLA+i34Rh0vPfSzLp2h+eOk1L+HEU0cck6a4w5n1Mv+okRt1ufkriVo5/y2PMV8IQg== + integrity: sha512-quiZqigPTDfMpmk25Iv8KFXEzx2ikDsI14yXMw4WO+MGW7FsV9V1ePLB5tJMobsbHhsGjbv78LgrbYDAZP9VIQ== tarball: 'file:projects/keyvault-secrets.tgz' version: 0.0.0 'file:projects/logger.tgz': @@ -10993,7 +11012,7 @@ packages: dev: false name: '@rush-temp/schema-registry-avro' resolution: - integrity: 
sha512-h+J/B+f/NpSSG78fT5fsLZ4jzupKaWe4Z11MGUFIcX/+toKzVOkojv61rRGLoHwaINT8J6IxyMkr6porRfi/AQ== + integrity: sha512-EqsEETl8kUKb2VWG+N/ZdyC0n0ZYfd5DV8k9CXVo2HCGMNTU1oPK+3fK449s3uu/eBivECV47Uysi5J5OyZ4kQ== tarball: 'file:projects/schema-registry-avro.tgz' version: 0.0.0 'file:projects/schema-registry.tgz': @@ -11050,7 +11069,7 @@ packages: dev: false name: '@rush-temp/schema-registry' resolution: - integrity: sha512-PjcvNh7JvVDINF3WfOMoFerV+qUO8M++D0eiTMkSG6TDZ6Jo2J1DQo3XnyTp0JchQXX0sPONbPg6+xaxUlAEZg== + integrity: sha512-mF+8c4mn7AsFrfsDdW1oSdJyh04/iCYalc+F77XFKWFfm2HhFYa7qIouBFd62Fl/vdReyBRT2saKMhX39ZYxVQ== tarball: 'file:projects/schema-registry.tgz' version: 0.0.0 'file:projects/search-documents.tgz': @@ -11193,7 +11212,7 @@ packages: dev: false name: '@rush-temp/service-bus' resolution: - integrity: sha512-GttMlKcf8CpDHVSSRODp/uuKowhaJNnglA4vKroX+wY0pirghFzT800NUDOCBLv08Uzpcu0q+gac5CKwHI/0xg== + integrity: sha512-947ihqmHYOF4tude9GzyRGzrYeNOXNcNIVXMJyPHgV06T+WP/snZavFpC0YrS2RBRvrIqWRQh4kId2e0VfUYcg== tarball: 'file:projects/service-bus.tgz' version: 0.0.0 'file:projects/storage-blob-changefeed.tgz': @@ -11257,7 +11276,7 @@ packages: dev: false name: '@rush-temp/storage-blob-changefeed' resolution: - integrity: sha512-ROl8a6HVH3NC/mKyAXgzrgeJ35Yk8tujcoeLfmjHJGn8tQQt82yNNqnxejmAB8huCAXVzBlMK5BKdSQh4YHMFA== + integrity: sha512-kv8o6uulABfvnyr/aayD1zj+B3xVWvDswYWmzqB8I5uFBEoolZzHIpxMDVL3SGRgG5gF02Ty9V4WJ52CblAINw== tarball: 'file:projects/storage-blob-changefeed.tgz' version: 0.0.0 'file:projects/storage-blob.tgz': @@ -11267,6 +11286,7 @@ packages: '@microsoft/api-extractor': 7.7.11 '@opentelemetry/api': 0.10.2 '@rollup/plugin-commonjs': 11.0.2_rollup@1.32.1 + '@rollup/plugin-json': 4.1.0_rollup@1.32.1 '@rollup/plugin-multi-entry': 3.0.1_rollup@1.32.1 '@rollup/plugin-node-resolve': 8.4.0_rollup@1.32.1 '@rollup/plugin-replace': 2.3.3_rollup@1.32.1 @@ -11319,7 +11339,7 @@ packages: dev: false name: '@rush-temp/storage-blob' resolution: - integrity: 
sha512-z/4kqI624+5E1514Aj58uljNFQdCOuw2eXAg9zxTeBoenR0g6DrmpU6fyMcmAKNKvSGPb5DYZQUXczYOLsllCQ== + integrity: sha512-S5qF/wHpHCaW7CTfWYjq/eAHOGa23wDaYiMbQqElQO1WQsERQfGWIH/bf1C0Mg1HQYfKjH7UxmTfvt87Iouxjg== tarball: 'file:projects/storage-blob.tgz' version: 0.0.0 'file:projects/storage-file-datalake.tgz': @@ -11329,6 +11349,7 @@ packages: '@microsoft/api-extractor': 7.7.11 '@opentelemetry/api': 0.10.2 '@rollup/plugin-commonjs': 11.0.2_rollup@1.32.1 + '@rollup/plugin-json': 4.1.0_rollup@1.32.1 '@rollup/plugin-multi-entry': 3.0.1_rollup@1.32.1 '@rollup/plugin-node-resolve': 8.4.0_rollup@1.32.1 '@rollup/plugin-replace': 2.3.3_rollup@1.32.1 @@ -11386,7 +11407,7 @@ packages: dev: false name: '@rush-temp/storage-file-datalake' resolution: - integrity: sha512-rt+5OdfynZSXLwRDCxPxnKEwEcAUPgrD8Z4ar+qhEtdP8NxwCrF4j0qppNlRi7nM/RFknAWoLlwX6admnjg4pQ== + integrity: sha512-8G5CDvfjNiz1ly3YxJbwq6ISOYpL++mnO4J/jB5zgm9IVn66wrn1iaf4xvKo4C/vdaIU/xddWjBH4yHwo1AzcQ== tarball: 'file:projects/storage-file-datalake.tgz' version: 0.0.0 'file:projects/storage-file-share.tgz': @@ -11566,9 +11587,54 @@ packages: dev: false name: '@rush-temp/storage-queue' resolution: - integrity: sha512-IHSQC996aAVBVtrsz3xCBeysHjXB1Q2zCoMxkj80n7hSYv9wEek9aEQnUHdnF3Gub0okjB756oCk4qaPb0I+Gg== + integrity: sha512-peAeDFKOdGTC2cqKuruSMGA0LCJ2peTAn1UPrf0T+AQQWmhyqJqtMRzdDAbd9gTx/6CGKolRoSfo0Zwtj5apRw== tarball: 'file:projects/storage-queue.tgz' version: 0.0.0 + 'file:projects/synapse-accesscontrol.tgz': + dependencies: + '@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.1 + typescript: 3.9.7 + uglify-js: 3.10.2 + dev: false + name: '@rush-temp/synapse-accesscontrol' + resolution: + integrity: sha512-TFY+QsYvHOyN+ZEQZzCBG75dfYFS2TN+EwPdS1vFmjEbFvtHJqyP4/EhQEx7F1xJ8pvB/7KkFNxFdz00aIY/Kg== + tarball: 'file:projects/synapse-accesscontrol.tgz' + version: 0.0.0 + 'file:projects/synapse-artifacts.tgz': + dependencies: + 
'@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.1 + typescript: 3.9.7 + uglify-js: 3.10.2 + dev: false + name: '@rush-temp/synapse-artifacts' + resolution: + integrity: sha512-z1Mzvmykm4QueMmQH8u3rs2Zo3NKDB6DOUDZpbxQd+uBfJkpCLtF3dVeweKusU1kUZh0lwB5CyEIr48QgGm/3w== + tarball: 'file:projects/synapse-artifacts.tgz' + version: 0.0.0 + 'file:projects/synapse-spark.tgz': + dependencies: + '@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.1 + typescript: 3.9.7 + uglify-js: 3.10.2 + dev: false + name: '@rush-temp/synapse-spark' + resolution: + integrity: sha512-N6icSSXfZHpeeQbCHi/Yo8e+rpyLYkShphRxHr8ftxQFSPin80kDZHn/44rwOwm0Uv9TpAJjlYvs+yzzljT7EA== + tarball: 'file:projects/synapse-spark.tgz' + version: 0.0.0 'file:projects/template.tgz': dependencies: '@azure/core-tracing': 1.0.0-preview.9 @@ -11785,6 +11851,9 @@ specifiers: '@rush-temp/storage-file-share': 'file:./projects/storage-file-share.tgz' '@rush-temp/storage-internal-avro': 'file:./projects/storage-internal-avro.tgz' '@rush-temp/storage-queue': 'file:./projects/storage-queue.tgz' + '@rush-temp/synapse-accesscontrol': 'file:./projects/synapse-accesscontrol.tgz' + '@rush-temp/synapse-artifacts': 'file:./projects/synapse-artifacts.tgz' + '@rush-temp/synapse-spark': 'file:./projects/synapse-spark.tgz' '@rush-temp/template': 'file:./projects/template.tgz' '@rush-temp/test-utils-perfstress': 'file:./projects/test-utils-perfstress.tgz' '@rush-temp/test-utils-recorder': 'file:./projects/test-utils-recorder.tgz' diff --git a/dataplane.code-workspace b/dataplane.code-workspace index fd31357c4d8e..2700987f22ef 100644 --- a/dataplane.code-workspace +++ b/dataplane.code-workspace @@ -136,6 +136,18 @@ "name": "storage-queue", "path": "sdk/storage/storage-queue" }, + { + "name": "synapse-accesscontrol", + "path": 
"sdk/synapse/synapse-accesscontrol" + }, + { + "name": "synapse-artifacts", + "path": "sdk/synapse/synapse-artifacts" + }, + { + "name": "synapse-spark", + "path": "sdk/synapse/synapse-spark" + }, { "name": "tables", "path": "sdk/tables/data-tables" diff --git a/rush.json b/rush.json index 579d19abf65e..1a98b9a64b55 100644 --- a/rush.json +++ b/rush.json @@ -552,6 +552,21 @@ "projectFolder": "sdk/storage/storage-queue", "versionPolicyName": "client" }, + { + "packageName": "@azure/synapse-accesscontrol", + "projectFolder": "sdk/synapse/synapse-accesscontrol", + "versionPolicyName": "client" + }, + { + "packageName": "@azure/synapse-artifacts", + "projectFolder": "sdk/synapse/synapse-artifacts", + "versionPolicyName": "client" + }, + { + "packageName": "@azure/synapse-spark", + "projectFolder": "sdk/synapse/synapse-spark", + "versionPolicyName": "client" + }, { "packageName": "@azure/data-tables", "projectFolder": "sdk/tables/data-tables", From 4ad59be9266b7b4b8c3fe3840097df56943a9402 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 30 Nov 2020 13:59:11 +1300 Subject: [PATCH 02/28] WIP --- common/config/rush/pnpm-lock.yaml | 34 + rush.json | 10 + sdk/synapse/synapse-accesscontrol/README.md | 27 + .../synapse-accesscontrol/api-extractor.json | 18 + .../synapse-accesscontrol/package.json | 52 + .../review/synapse-accesscontrol.api.md | 163 + .../synapse-accesscontrol/rollup.config.js | 31 + .../src/accessControlClient.ts | 367 + .../src/accessControlClientContext.ts | 52 + .../synapse-accesscontrol/src/index.ts | 4 + .../synapse-accesscontrol/src/models/index.ts | 282 + .../src/models/mappers.ts | 208 + .../src/models/parameters.ts | 126 + .../synapse-accesscontrol/tsconfig.json | 20 + sdk/synapse/synapse-artifacts/README.md | 27 + .../synapse-artifacts/api-extractor.json | 18 + sdk/synapse/synapse-artifacts/package.json | 53 + .../review/synapse-artifacts.api.md | 6619 +++++ .../synapse-artifacts/rollup.config.js | 31 + 
.../synapse-artifacts/src/artifactsClient.ts | 71 + .../src/artifactsClientContext.ts | 63 + sdk/synapse/synapse-artifacts/src/index.ts | 4 + .../src/lro/azureAsyncOperationStrategy.ts | 231 + .../src/lro/bodyPollingStrategy.ts | 54 + .../synapse-artifacts/src/lro/constants.ts | 1 + .../synapse-artifacts/src/lro/index.ts | 15 + .../src/lro/locationStrategy.ts | 67 + .../synapse-artifacts/src/lro/lroPolicy.ts | 36 + .../synapse-artifacts/src/lro/lroPoller.ts | 147 + .../synapse-artifacts/src/lro/models.ts | 67 + .../synapse-artifacts/src/lro/operation.ts | 74 + .../src/lro/passthroughStrategy.ts | 22 + .../synapse-artifacts/src/lro/requestUtils.ts | 109 + .../synapse-artifacts/src/models/index.ts | 15974 ++++++++++++ .../synapse-artifacts/src/models/mappers.ts | 21533 ++++++++++++++++ .../src/models/parameters.ts | 400 + .../src/operations/bigDataPools.ts | 89 + .../src/operations/dataFlow.ts | 370 + .../src/operations/dataFlowDebugSession.ts | 368 + .../src/operations/dataset.ts | 370 + .../synapse-artifacts/src/operations/index.ts | 16 + .../src/operations/integrationRuntimes.ts | 94 + .../src/operations/linkedService.ts | 373 + .../src/operations/notebook.ts | 488 + .../src/operations/pipeline.ts | 415 + .../src/operations/pipelineRun.ts | 182 + .../src/operations/sparkJobDefinition.ts | 458 + .../src/operations/sqlPools.ts | 89 + .../src/operations/sqlScript.ts | 330 + .../src/operations/trigger.ts | 571 + .../src/operations/triggerRun.ts | 144 + .../src/operations/workspace.ts | 54 + .../operations/workspaceGitRepoManagement.ts | 66 + sdk/synapse/synapse-artifacts/tsconfig.json | 20 + .../synapse-managed-endpoints/README.md | 27 + .../api-extractor.json | 18 + .../synapse-managed-endpoints/package.json | 52 + .../review/synapse-managed-endpoints.api.md | 99 + .../rollup.config.js | 31 + .../synapse-managed-endpoints/src/index.ts | 4 + .../src/managedPrivateEndpointsClient.ts | 24 + .../managedPrivateEndpointsClientContext.ts | 52 + .../src/models/index.ts | 183 
+ .../src/models/mappers.ts | 136 + .../src/models/parameters.ts | 93 + .../src/operations/index.ts | 1 + .../src/operations/managedPrivateEndpoints.ts | 276 + .../synapse-managed-endpoints/tsconfig.json | 20 + sdk/synapse/synapse-monitoring/README.md | 27 + .../synapse-monitoring/api-extractor.json | 18 + sdk/synapse/synapse-monitoring/package.json | 48 + .../review/synapse-monitoring.api.md | 116 + .../synapse-monitoring/rollup.config.js | 31 + sdk/synapse/synapse-monitoring/src/index.ts | 3 + .../synapse-monitoring/src/models/index.ts | 108 + .../synapse-monitoring/src/models/mappers.ts | 155 + .../src/models/parameters.ts | 81 + .../src/monitoringClient.ts | 24 + .../src/monitoringClientContext.ts | 52 + .../src/operations/index.ts | 1 + .../src/operations/monitoring.ts | 92 + sdk/synapse/synapse-monitoring/tsconfig.json | 20 + sdk/synapse/synapse-spark/README.md | 27 + sdk/synapse/synapse-spark/api-extractor.json | 18 + sdk/synapse/synapse-spark/package.json | 48 + .../synapse-spark/review/synapse-spark.api.md | 582 + sdk/synapse/synapse-spark/rollup.config.js | 31 + sdk/synapse/synapse-spark/src/index.ts | 3 + sdk/synapse/synapse-spark/src/models/index.ts | 762 + .../synapse-spark/src/models/mappers.ts | 1156 + .../synapse-spark/src/models/parameters.ts | 148 + .../synapse-spark/src/operations/index.ts | 2 + .../src/operations/sparkBatch.ts | 171 + .../src/operations/sparkSession.ts | 364 + sdk/synapse/synapse-spark/src/sparkClient.ts | 28 + .../synapse-spark/src/sparkClientContext.ts | 61 + sdk/synapse/synapse-spark/tsconfig.json | 20 + 97 files changed, 56720 insertions(+) create mode 100644 sdk/synapse/synapse-accesscontrol/README.md create mode 100644 sdk/synapse/synapse-accesscontrol/api-extractor.json create mode 100644 sdk/synapse/synapse-accesscontrol/package.json create mode 100644 sdk/synapse/synapse-accesscontrol/review/synapse-accesscontrol.api.md create mode 100644 sdk/synapse/synapse-accesscontrol/rollup.config.js create mode 100644 
sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts create mode 100644 sdk/synapse/synapse-accesscontrol/src/accessControlClientContext.ts create mode 100644 sdk/synapse/synapse-accesscontrol/src/index.ts create mode 100644 sdk/synapse/synapse-accesscontrol/src/models/index.ts create mode 100644 sdk/synapse/synapse-accesscontrol/src/models/mappers.ts create mode 100644 sdk/synapse/synapse-accesscontrol/src/models/parameters.ts create mode 100644 sdk/synapse/synapse-accesscontrol/tsconfig.json create mode 100644 sdk/synapse/synapse-artifacts/README.md create mode 100644 sdk/synapse/synapse-artifacts/api-extractor.json create mode 100644 sdk/synapse/synapse-artifacts/package.json create mode 100644 sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md create mode 100644 sdk/synapse/synapse-artifacts/rollup.config.js create mode 100644 sdk/synapse/synapse-artifacts/src/artifactsClient.ts create mode 100644 sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts create mode 100644 sdk/synapse/synapse-artifacts/src/index.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/constants.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/index.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/models.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/operation.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts create mode 100644 sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts create mode 100644 sdk/synapse/synapse-artifacts/src/models/index.ts create mode 100644 
sdk/synapse/synapse-artifacts/src/models/mappers.ts create mode 100644 sdk/synapse/synapse-artifacts/src/models/parameters.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/dataset.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/index.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/linkedService.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/notebook.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/pipeline.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/trigger.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/workspace.ts create mode 100644 sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts create mode 100644 sdk/synapse/synapse-artifacts/tsconfig.json create mode 100644 sdk/synapse/synapse-managed-endpoints/README.md create mode 100644 sdk/synapse/synapse-managed-endpoints/api-extractor.json create mode 100644 sdk/synapse/synapse-managed-endpoints/package.json create mode 100644 sdk/synapse/synapse-managed-endpoints/review/synapse-managed-endpoints.api.md create mode 100644 sdk/synapse/synapse-managed-endpoints/rollup.config.js create mode 100644 sdk/synapse/synapse-managed-endpoints/src/index.ts 
create mode 100644 sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClient.ts create mode 100644 sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClientContext.ts create mode 100644 sdk/synapse/synapse-managed-endpoints/src/models/index.ts create mode 100644 sdk/synapse/synapse-managed-endpoints/src/models/mappers.ts create mode 100644 sdk/synapse/synapse-managed-endpoints/src/models/parameters.ts create mode 100644 sdk/synapse/synapse-managed-endpoints/src/operations/index.ts create mode 100644 sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts create mode 100644 sdk/synapse/synapse-managed-endpoints/tsconfig.json create mode 100644 sdk/synapse/synapse-monitoring/README.md create mode 100644 sdk/synapse/synapse-monitoring/api-extractor.json create mode 100644 sdk/synapse/synapse-monitoring/package.json create mode 100644 sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md create mode 100644 sdk/synapse/synapse-monitoring/rollup.config.js create mode 100644 sdk/synapse/synapse-monitoring/src/index.ts create mode 100644 sdk/synapse/synapse-monitoring/src/models/index.ts create mode 100644 sdk/synapse/synapse-monitoring/src/models/mappers.ts create mode 100644 sdk/synapse/synapse-monitoring/src/models/parameters.ts create mode 100644 sdk/synapse/synapse-monitoring/src/monitoringClient.ts create mode 100644 sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts create mode 100644 sdk/synapse/synapse-monitoring/src/operations/index.ts create mode 100644 sdk/synapse/synapse-monitoring/src/operations/monitoring.ts create mode 100644 sdk/synapse/synapse-monitoring/tsconfig.json create mode 100644 sdk/synapse/synapse-spark/README.md create mode 100644 sdk/synapse/synapse-spark/api-extractor.json create mode 100644 sdk/synapse/synapse-spark/package.json create mode 100644 sdk/synapse/synapse-spark/review/synapse-spark.api.md create mode 100644 sdk/synapse/synapse-spark/rollup.config.js create 
mode 100644 sdk/synapse/synapse-spark/src/index.ts create mode 100644 sdk/synapse/synapse-spark/src/models/index.ts create mode 100644 sdk/synapse/synapse-spark/src/models/mappers.ts create mode 100644 sdk/synapse/synapse-spark/src/models/parameters.ts create mode 100644 sdk/synapse/synapse-spark/src/operations/index.ts create mode 100644 sdk/synapse/synapse-spark/src/operations/sparkBatch.ts create mode 100644 sdk/synapse/synapse-spark/src/operations/sparkSession.ts create mode 100644 sdk/synapse/synapse-spark/src/sparkClient.ts create mode 100644 sdk/synapse/synapse-spark/src/sparkClientContext.ts create mode 100644 sdk/synapse/synapse-spark/tsconfig.json diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index b2fd9a8707a4..d5995bc0eafa 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -49,6 +49,8 @@ dependencies: '@rush-temp/storage-queue': 'file:projects/storage-queue.tgz' '@rush-temp/synapse-accesscontrol': 'file:projects/synapse-accesscontrol.tgz' '@rush-temp/synapse-artifacts': 'file:projects/synapse-artifacts.tgz' + '@rush-temp/synapse-managed-endpoints': 'file:projects/synapse-managed-endpoints.tgz' + '@rush-temp/synapse-monitoring': 'file:projects/synapse-monitoring.tgz' '@rush-temp/synapse-spark': 'file:projects/synapse-spark.tgz' '@rush-temp/template': 'file:projects/template.tgz' '@rush-temp/test-utils-perfstress': 'file:projects/test-utils-perfstress.tgz' @@ -11620,6 +11622,36 @@ packages: integrity: sha512-z1Mzvmykm4QueMmQH8u3rs2Zo3NKDB6DOUDZpbxQd+uBfJkpCLtF3dVeweKusU1kUZh0lwB5CyEIr48QgGm/3w== tarball: 'file:projects/synapse-artifacts.tgz' version: 0.0.0 + 'file:projects/synapse-managed-endpoints.tgz': + dependencies: + '@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.1 + typescript: 3.9.7 + uglify-js: 3.10.2 + dev: false + name: '@rush-temp/synapse-managed-endpoints' + 
resolution: + integrity: sha512-bF8Y9PRUZY4nG9Xeu/yxWXSabNdd7XJPbFOwzzMyRX+Ti37Kidp3FBI90sxBRdCSVFNXPHUQaDeKXQ1LDvAC8w== + tarball: 'file:projects/synapse-managed-endpoints.tgz' + version: 0.0.0 + 'file:projects/synapse-monitoring.tgz': + dependencies: + '@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.1 + typescript: 3.9.7 + uglify-js: 3.10.2 + dev: false + name: '@rush-temp/synapse-monitoring' + resolution: + integrity: sha512-GMO/9OUshXiUwhVVPygWF0/pFk9qLgckZWYgOhYh4locL6PRqTcMLOFeq4TVjmzqDv0Fd94MMcirJMhE4I8tFA== + tarball: 'file:projects/synapse-monitoring.tgz' + version: 0.0.0 'file:projects/synapse-spark.tgz': dependencies: '@microsoft/api-extractor': 7.7.11 @@ -11853,6 +11885,8 @@ specifiers: '@rush-temp/storage-queue': 'file:./projects/storage-queue.tgz' '@rush-temp/synapse-accesscontrol': 'file:./projects/synapse-accesscontrol.tgz' '@rush-temp/synapse-artifacts': 'file:./projects/synapse-artifacts.tgz' + '@rush-temp/synapse-managed-endpoints': 'file:./projects/synapse-managed-endpoints.tgz' + '@rush-temp/synapse-monitoring': 'file:./projects/synapse-monitoring.tgz' '@rush-temp/synapse-spark': 'file:./projects/synapse-spark.tgz' '@rush-temp/template': 'file:./projects/template.tgz' '@rush-temp/test-utils-perfstress': 'file:./projects/test-utils-perfstress.tgz' diff --git a/rush.json b/rush.json index 1a98b9a64b55..90a57b6098e3 100644 --- a/rush.json +++ b/rush.json @@ -562,6 +562,16 @@ "projectFolder": "sdk/synapse/synapse-artifacts", "versionPolicyName": "client" }, + { + "packageName": "@azure/synapse-managed-endpoints", + "projectFolder": "sdk/synapse/synapse-managed-endpoints", + "versionPolicyName": "client" + }, + { + "packageName": "@azure/synapse-monitoring", + "projectFolder": "sdk/synapse/synapse-monitoring", + "versionPolicyName": "client" + }, { "packageName": "@azure/synapse-spark", "projectFolder": "sdk/synapse/synapse-spark", diff --git 
a/sdk/synapse/synapse-accesscontrol/README.md b/sdk/synapse/synapse-accesscontrol/README.md new file mode 100644 index 000000000000..5da1483fc507 --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/README.md @@ -0,0 +1,27 @@ +## Azure AccessControlClient SDK for JavaScript + +This package contains an isomorphic SDK for AccessControlClient. + +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +### How to Install + +```bash +npm install @azure/synapse-accesscontrol +``` + +### How to use + +#### Sample code + +Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. + +## Related projects + +- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) + + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file diff --git a/sdk/synapse/synapse-accesscontrol/api-extractor.json b/sdk/synapse/synapse-accesscontrol/api-extractor.json new file mode 100644 index 000000000000..5fb5b21b56f2 --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/api-extractor.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "mainEntryPointFilePath": "./esm/index.d.ts", + "docModel": { "enabled": true }, + "apiReport": { "enabled": true, "reportFolder": "./review" }, + "dtsRollup": { + "enabled": true, + "untrimmedFilePath": "", + "publicTrimmedFilePath": "./esm/index.d.ts" + }, + "messages": { + "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "extractorMessageReporting": { + "ae-missing-release-tag": { "logLevel": "none" }, + "ae-unresolved-link": { "logLevel": "none" } + } + } +} diff --git a/sdk/synapse/synapse-accesscontrol/package.json b/sdk/synapse/synapse-accesscontrol/package.json new file mode 100644 index 000000000000..665dac753508 --- 
/dev/null +++ b/sdk/synapse/synapse-accesscontrol/package.json @@ -0,0 +1,52 @@ +{ + "name": "@azure/synapse-accesscontrol", + "author": "Microsoft Corporation", + "description": "A generated SDK for AccessControlClient.", + "version": "1.0.0", + "dependencies": { + "@azure/core-paging": "^1.1.1", + "@azure/core-http": "^1.2.0", + "tslib": "^2.0.0" + }, + "keywords": ["node", "azure", "typescript", "browser", "isomorphic"], + "license": "MIT", + "main": "./dist/synapse-accesscontrol.js", + "module": "./esm/index.js", + "types": "./esm/index.d.ts", + "devDependencies": { + "typescript": "~3.9.3", + "rollup": "^1.16.3", + "rollup-plugin-node-resolve": "^3.4.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "uglify-js": "^3.4.9", + "@microsoft/api-extractor": "7.7.11" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js", + "repository": { + "type": "git", + "url": "https://github.com/Azure/azure-sdk-for-js.git" + }, + "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "esm/**/*.js", + "esm/**/*.js.map", + "esm/**/*.d.ts", + "esm/**/*.d.ts.map", + "src/**/*.ts", + "README.md", + "rollup.config.js", + "tsconfig.json" + ], + "scripts": { + "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-accesscontrol.js.map'\" -o ./dist/synapse-accesscontrol.min.js ./dist/synapse-accesscontrol.js", + "prepack": "npm install && npm run build", + "extract-api": "api-extractor run --local" + }, + "sideEffects": false, + "autoPublish": true +} diff --git a/sdk/synapse/synapse-accesscontrol/review/synapse-accesscontrol.api.md b/sdk/synapse/synapse-accesscontrol/review/synapse-accesscontrol.api.md new file mode 100644 index 000000000000..47289f4c84e5 --- /dev/null +++ 
b/sdk/synapse/synapse-accesscontrol/review/synapse-accesscontrol.api.md @@ -0,0 +1,163 @@ +## API Report File for "@azure/synapse-accesscontrol" + +> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). + +```ts + +import * as coreHttp from '@azure/core-http'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; + +// @public (undocumented) +export class AccessControlClient extends AccessControlClientContext { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: AccessControlClientOptionalParams); + createRoleAssignment(createRoleAssignmentOptions: RoleAssignmentOptions, options?: coreHttp.OperationOptions): Promise; + deleteRoleAssignmentById(roleAssignmentId: string, options?: coreHttp.OperationOptions): Promise; + getCallerRoleAssignments(options?: coreHttp.OperationOptions): Promise; + getRoleAssignmentById(roleAssignmentId: string, options?: coreHttp.OperationOptions): Promise; + getRoleAssignments(options?: AccessControlClientGetRoleAssignmentsOptionalParams): Promise; + getRoleDefinitionById(roleId: string, options?: coreHttp.OperationOptions): Promise; + listRoleDefinitions(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; +} + +// @public (undocumented) +export class AccessControlClientContext extends coreHttp.ServiceClient { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: AccessControlClientOptionalParams); + // (undocumented) + apiVersion: string; + // (undocumented) + endpoint: string; +} + +// @public +export type AccessControlClientCreateRoleAssignmentResponse = RoleAssignmentDetails & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: RoleAssignmentDetails; + }; +}; + +// @public +export type AccessControlClientGetCallerRoleAssignmentsResponse = { + body: string[]; + _response: coreHttp.HttpResponse & { + bodyAsText: 
string; + parsedBody: string[]; + }; +}; + +// @public +export type AccessControlClientGetRoleAssignmentByIdResponse = RoleAssignmentDetails & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: RoleAssignmentDetails; + }; +}; + +// @public +export interface AccessControlClientGetRoleAssignmentsHeaders { + xMsContinuation?: string; +} + +// @public +export interface AccessControlClientGetRoleAssignmentsOptionalParams extends coreHttp.OperationOptions { + continuationToken?: string; + principalId?: string; + roleId?: string; +} + +// @public +export type AccessControlClientGetRoleAssignmentsResponse = AccessControlClientGetRoleAssignmentsHeaders & RoleAssignmentDetails[] & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: RoleAssignmentDetails[]; + parsedHeaders: AccessControlClientGetRoleAssignmentsHeaders; + }; +}; + +// @public +export type AccessControlClientGetRoleDefinitionByIdResponse = SynapseRole & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SynapseRole; + }; +}; + +// @public +export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: RolesListResponse; + }; +}; + +// @public +export type AccessControlClientGetRoleDefinitionsResponse = RolesListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: RolesListResponse; + }; +}; + +// @public +export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions { + apiVersion?: string; + endpoint?: string; +} + +// @public +export interface ErrorContract { + error?: ErrorResponse; +} + +// @public (undocumented) +export interface ErrorDetail { + // (undocumented) + code: string; + // (undocumented) + message: string; + // (undocumented) + target?: string; +} + +// @public (undocumented) +export interface ErrorResponse { + // (undocumented) + code: string; + // 
(undocumented) + details?: ErrorDetail[]; + // (undocumented) + message: string; + // (undocumented) + target?: string; +} + +// @public +export interface RoleAssignmentDetails { + id?: string; + principalId?: string; + roleId?: string; +} + +// @public +export interface RoleAssignmentOptions { + principalId: string; + roleId: string; +} + +// @public +export interface RolesListResponse { + nextLink?: string; + value: SynapseRole[]; +} + +// @public +export interface SynapseRole { + id?: string; + isBuiltIn: boolean; + name?: string; +} + + +// (No @packageDocumentation comment for this package) + +``` diff --git a/sdk/synapse/synapse-accesscontrol/rollup.config.js b/sdk/synapse/synapse-accesscontrol/rollup.config.js new file mode 100644 index 000000000000..25466ddd5767 --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/rollup.config.js @@ -0,0 +1,31 @@ +import rollup from "rollup"; +import nodeResolve from "rollup-plugin-node-resolve"; +import sourcemaps from "rollup-plugin-sourcemaps"; + +/** + * @type {rollup.RollupFileOptions} + */ +const config = { + input: "./esm/accessControlClient.js", + external: ["@azure/core-http", "@azure/core-arm"], + output: { + file: "./dist/synapse-accesscontrol.js", + format: "umd", + name: "Azure.SynapseAccesscontrol", + sourcemap: true, + globals: { + "@azure/core-http": "coreHttp", + "@azure/core-arm": "coreArm" + }, + banner: `/* + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ ` + }, + plugins: [nodeResolve({ module: true }), sourcemaps()] +}; + +export default config; diff --git a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts new file mode 100644 index 000000000000..0a7ff0c8a2e3 --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts @@ -0,0 +1,367 @@ +import * as coreHttp from "@azure/core-http"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as Parameters from "./models/parameters"; +import * as Mappers from "./models/mappers"; +import { AccessControlClientContext } from "./accessControlClientContext"; +import { + AccessControlClientOptionalParams, + SynapseRole, + AccessControlClientGetRoleDefinitionsResponse, + AccessControlClientGetRoleDefinitionByIdResponse, + RoleAssignmentOptions, + AccessControlClientCreateRoleAssignmentResponse, + AccessControlClientGetRoleAssignmentsOptionalParams, + AccessControlClientGetRoleAssignmentsResponse, + AccessControlClientGetRoleAssignmentByIdResponse, + AccessControlClientGetCallerRoleAssignmentsResponse, + AccessControlClientGetRoleDefinitionsNextResponse +} from "./models"; + +export class AccessControlClient extends AccessControlClientContext { + /** + * Initializes a new instance of the AccessControlClient class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: AccessControlClientOptionalParams + ) { + super(credentials, endpoint, options); + } + + /** + * List roles. + * @param options The options parameters. 
+ */ + public listRoleDefinitions( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getRoleDefinitionsPagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getRoleDefinitionsPagingPage(options); + } + }; + } + + private async *getRoleDefinitionsPagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getRoleDefinitions(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getRoleDefinitionsNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getRoleDefinitionsPagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getRoleDefinitionsPagingPage(options)) { + yield* page; + } + } + + /** + * List roles. + * @param options The options parameters. + */ + private _getRoleDefinitions( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.sendOperationRequest( + operationArguments, + getRoleDefinitionsOperationSpec + ) as Promise; + } + + /** + * Get role by role Id. + * @param roleId Synapse Built-In Role Id. + * @param options The options parameters. + */ + getRoleDefinitionById( + roleId: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + roleId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.sendOperationRequest( + operationArguments, + getRoleDefinitionByIdOperationSpec + ) as Promise; + } + + /** + * Create role assignment. + * @param createRoleAssignmentOptions Details of role id and object id. 
+ * @param options The options parameters. + */ + createRoleAssignment( + createRoleAssignmentOptions: RoleAssignmentOptions, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + createRoleAssignmentOptions, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.sendOperationRequest( + operationArguments, + createRoleAssignmentOperationSpec + ) as Promise; + } + + /** + * List role assignments. + * @param options The options parameters. + */ + getRoleAssignments( + options?: AccessControlClientGetRoleAssignmentsOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.sendOperationRequest( + operationArguments, + getRoleAssignmentsOperationSpec + ) as Promise; + } + + /** + * Get role assignment by role assignment Id. + * @param roleAssignmentId The ID of the role assignment. + * @param options The options parameters. + */ + getRoleAssignmentById( + roleAssignmentId: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + roleAssignmentId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.sendOperationRequest( + operationArguments, + getRoleAssignmentByIdOperationSpec + ) as Promise; + } + + /** + * Delete role assignment by role assignment Id. + * @param roleAssignmentId The ID of the role assignment. + * @param options The options parameters. 
+ */ + deleteRoleAssignmentById( + roleAssignmentId: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + roleAssignmentId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.sendOperationRequest( + operationArguments, + deleteRoleAssignmentByIdOperationSpec + ) as Promise; + } + + /** + * List role assignments of the caller. + * @param options The options parameters. + */ + getCallerRoleAssignments( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.sendOperationRequest( + operationArguments, + getCallerRoleAssignmentsOperationSpec + ) as Promise; + } + + /** + * GetRoleDefinitionsNext + * @param nextLink The nextLink from the previous successful call to the GetRoleDefinitions method. + * @param options The options parameters. + */ + private _getRoleDefinitionsNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.sendOperationRequest( + operationArguments, + getRoleDefinitionsNextOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getRoleDefinitionsOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roles", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.RolesListResponse + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getRoleDefinitionByIdOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roles/{roleId}", + httpMethod: "GET", + 
responses: { + 200: { + bodyMapper: Mappers.SynapseRole + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.roleId], + headerParameters: [Parameters.accept], + serializer +}; +const createRoleAssignmentOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roleAssignments", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.RoleAssignmentDetails + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + requestBody: Parameters.createRoleAssignmentOptions, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roleAssignments", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "RoleAssignmentDetails" } + } + } + }, + headersMapper: Mappers.AccessControlClientGetRoleAssignmentsHeaders + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [ + Parameters.apiVersion, + Parameters.roleId1, + Parameters.principalId + ], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.continuationToken], + serializer +}; +const getRoleAssignmentByIdOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roleAssignments/{roleAssignmentId}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.RoleAssignmentDetails + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.roleAssignmentId], + headerParameters: [Parameters.accept], + serializer +}; +const deleteRoleAssignmentByIdOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roleAssignments/{roleAssignmentId}", + 
httpMethod: "DELETE", + responses: { + 200: {}, + 204: {}, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.roleAssignmentId], + headerParameters: [Parameters.accept], + serializer +}; +const getCallerRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/getMyAssignedRoles", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Sequence", element: { type: { name: "String" } } } + } + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getRoleDefinitionsNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.RolesListResponse + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-accesscontrol/src/accessControlClientContext.ts b/sdk/synapse/synapse-accesscontrol/src/accessControlClientContext.ts new file mode 100644 index 000000000000..3bea87af164d --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/src/accessControlClientContext.ts @@ -0,0 +1,52 @@ +import * as coreHttp from "@azure/core-http"; +import { AccessControlClientOptionalParams } from "./models"; + +const packageName = "@azure/synapse-accesscontrol"; +const packageVersion = "1.0.0"; + +export class AccessControlClientContext extends coreHttp.ServiceClient { + endpoint: string; + apiVersion: string; + + /** + * Initializes a new instance of the AccessControlClientContext class. + * @param credentials Subscription credentials which uniquely identify client subscription. 
+ * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: AccessControlClientOptionalParams + ) { + if (credentials === undefined) { + throw new Error("'credentials' cannot be null"); + } + if (endpoint === undefined) { + throw new Error("'endpoint' cannot be null"); + } + + // Initializing default values for options + if (!options) { + options = {}; + } + + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + + super(credentials, options); + + this.requestContentType = "application/json; charset=utf-8"; + + this.baseUri = options.endpoint || "{endpoint}"; + + // Parameter assignments + this.endpoint = endpoint; + + // Assigning values to Constant parameters + this.apiVersion = options.apiVersion || "2020-02-01-preview"; + } +} diff --git a/sdk/synapse/synapse-accesscontrol/src/index.ts b/sdk/synapse/synapse-accesscontrol/src/index.ts new file mode 100644 index 000000000000..395119853995 --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/src/index.ts @@ -0,0 +1,4 @@ +/// +export * from "./models"; +export { AccessControlClient } from "./accessControlClient"; +export { AccessControlClientContext } from "./accessControlClientContext"; diff --git a/sdk/synapse/synapse-accesscontrol/src/models/index.ts b/sdk/synapse/synapse-accesscontrol/src/models/index.ts new file mode 100644 index 000000000000..e651fb487f46 --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/src/models/index.ts @@ -0,0 +1,282 @@ +import * as coreHttp from "@azure/core-http"; + +/** + * A list of Synapse roles available. + */ +export interface RolesListResponse { + /** + * List of Synapse roles. 
+ */ + value: SynapseRole[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * Synapse role details + */ +export interface SynapseRole { + /** + * Role ID + */ + id?: string; + /** + * Name of the Synapse role + */ + name?: string; + /** + * Is a built-in role or not + */ + isBuiltIn: boolean; +} + +/** + * Contains details when the response code indicates an error. + */ +export interface ErrorContract { + /** + * The error details. + */ + error?: ErrorResponse; +} + +export interface ErrorResponse { + code: string; + message: string; + target?: string; + details?: ErrorDetail[]; +} + +export interface ErrorDetail { + code: string; + message: string; + target?: string; +} + +/** + * Role Assignment request details + */ +export interface RoleAssignmentOptions { + /** + * Role ID of the Synapse Built-In Role + */ + roleId: string; + /** + * Object ID of the AAD principal or security-group + */ + principalId: string; +} + +/** + * Role Assignment response details + */ +export interface RoleAssignmentDetails { + /** + * Role Assignment ID + */ + id?: string; + /** + * Role ID of the Synapse Built-In Role + */ + roleId?: string; + /** + * Object ID of the AAD principal or security-group + */ + principalId?: string; +} + +/** + * Defines headers for AccessControlClient_getRoleAssignments operation. + */ +export interface AccessControlClientGetRoleAssignmentsHeaders { + /** + * If the number of role assignments to be listed exceeds the maxResults limit, a continuation token is returned in this response header. When a continuation token is returned in the response, it must be specified in a subsequent invocation of the list operation to continue listing the role assignments. + */ + xMsContinuation?: string; +} + +/** + * Contains response data for the getRoleDefinitions operation. 
+ */ +export type AccessControlClientGetRoleDefinitionsResponse = RolesListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: RolesListResponse; + }; +}; + +/** + * Contains response data for the getRoleDefinitionById operation. + */ +export type AccessControlClientGetRoleDefinitionByIdResponse = SynapseRole & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SynapseRole; + }; +}; + +/** + * Contains response data for the createRoleAssignment operation. + */ +export type AccessControlClientCreateRoleAssignmentResponse = RoleAssignmentDetails & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: RoleAssignmentDetails; + }; +}; + +/** + * Optional parameters. + */ +export interface AccessControlClientGetRoleAssignmentsOptionalParams + extends coreHttp.OperationOptions { + /** + * Synapse Built-In Role Id. + */ + roleId?: string; + /** + * Object ID of the AAD principal or security-group. + */ + principalId?: string; + /** + * Continuation token. + */ + continuationToken?: string; +} + +/** + * Contains response data for the getRoleAssignments operation. + */ +export type AccessControlClientGetRoleAssignmentsResponse = AccessControlClientGetRoleAssignmentsHeaders & + RoleAssignmentDetails[] & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: RoleAssignmentDetails[]; + /** + * The parsed HTTP response headers. + */ + parsedHeaders: AccessControlClientGetRoleAssignmentsHeaders; + }; + }; + +/** + * Contains response data for the getRoleAssignmentById operation. + */ +export type AccessControlClientGetRoleAssignmentByIdResponse = RoleAssignmentDetails & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: RoleAssignmentDetails; + }; +}; + +/** + * Contains response data for the getCallerRoleAssignments operation. + */ +export type AccessControlClientGetCallerRoleAssignmentsResponse = { + /** + * The parsed response body. + */ + body: string[]; + + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: string[]; + }; +}; + +/** + * Contains response data for the getRoleDefinitionsNext operation. + */ +export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: RolesListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface AccessControlClientOptionalParams + extends coreHttp.ServiceClientOptions { + /** + * Api Version + */ + apiVersion?: string; + /** + * Overrides client endpoint. 
+ */ + endpoint?: string; +} diff --git a/sdk/synapse/synapse-accesscontrol/src/models/mappers.ts b/sdk/synapse/synapse-accesscontrol/src/models/mappers.ts new file mode 100644 index 000000000000..a7b674d7d875 --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/src/models/mappers.ts @@ -0,0 +1,208 @@ +import * as coreHttp from "@azure/core-http"; + +export const RolesListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RolesListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SynapseRole" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const SynapseRole: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SynapseRole", + modelProperties: { + id: { + serializedName: "id", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + isBuiltIn: { + serializedName: "isBuiltIn", + required: true, + type: { + name: "Boolean" + } + } + } + } +}; + +export const ErrorContract: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorContract", + modelProperties: { + error: { + serializedName: "error", + type: { + name: "Composite", + className: "ErrorResponse" + } + } + } + } +}; + +export const ErrorResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorResponse", + modelProperties: { + code: { + serializedName: "code", + required: true, + type: { + name: "String" + } + }, + message: { + serializedName: "message", + required: true, + type: { + name: "String" + } + }, + target: { + serializedName: "target", + type: { + name: "String" + } + }, + details: { + serializedName: "details", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ErrorDetail" + } + } + } + } + } + } +}; + 
+export const ErrorDetail: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorDetail", + modelProperties: { + code: { + serializedName: "code", + required: true, + type: { + name: "String" + } + }, + message: { + serializedName: "message", + required: true, + type: { + name: "String" + } + }, + target: { + serializedName: "target", + type: { + name: "String" + } + } + } + } +}; + +export const RoleAssignmentOptions: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RoleAssignmentOptions", + modelProperties: { + roleId: { + serializedName: "roleId", + required: true, + type: { + name: "String" + } + }, + principalId: { + serializedName: "principalId", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const RoleAssignmentDetails: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RoleAssignmentDetails", + modelProperties: { + id: { + serializedName: "id", + type: { + name: "String" + } + }, + roleId: { + serializedName: "roleId", + type: { + name: "String" + } + }, + principalId: { + serializedName: "principalId", + type: { + name: "String" + } + } + } + } +}; + +export const AccessControlClientGetRoleAssignmentsHeaders: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AccessControlClientGetRoleAssignmentsHeaders", + modelProperties: { + xMsContinuation: { + serializedName: "x-ms-continuation", + type: { + name: "String" + } + } + } + } +}; diff --git a/sdk/synapse/synapse-accesscontrol/src/models/parameters.ts b/sdk/synapse/synapse-accesscontrol/src/models/parameters.ts new file mode 100644 index 000000000000..851797e88961 --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/src/models/parameters.ts @@ -0,0 +1,126 @@ +import { + OperationParameter, + OperationURLParameter, + OperationQueryParameter +} from "@azure/core-http"; +import { RoleAssignmentOptions as RoleAssignmentOptionsMapper } from "../models/mappers"; + +export const accept: 
OperationParameter = { + parameterPath: "accept", + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; + +export const endpoint: OperationURLParameter = { + parameterPath: "endpoint", + mapper: { + serializedName: "endpoint", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const apiVersion: OperationQueryParameter = { + parameterPath: "apiVersion", + mapper: { + defaultValue: "2020-02-01-preview", + isConstant: true, + serializedName: "api-version", + type: { + name: "String" + } + } +}; + +export const roleId: OperationURLParameter = { + parameterPath: "roleId", + mapper: { + serializedName: "roleId", + required: true, + type: { + name: "String" + } + } +}; + +export const contentType: OperationParameter = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; + +export const createRoleAssignmentOptions: OperationParameter = { + parameterPath: "createRoleAssignmentOptions", + mapper: RoleAssignmentOptionsMapper +}; + +export const roleId1: OperationQueryParameter = { + parameterPath: ["options", "roleId"], + mapper: { + serializedName: "roleId", + type: { + name: "String" + } + } +}; + +export const principalId: OperationQueryParameter = { + parameterPath: ["options", "principalId"], + mapper: { + serializedName: "principalId", + type: { + name: "String" + } + } +}; + +export const continuationToken: OperationParameter = { + parameterPath: ["options", "continuationToken"], + mapper: { + serializedName: "x-ms-continuation", + type: { + name: "String" + } + } +}; + +export const roleAssignmentId: OperationURLParameter = { + parameterPath: "roleAssignmentId", + mapper: { + constraints: { + MinLength: 1 + }, + serializedName: "roleAssignmentId", + required: true, + type: { + name: "String" + } + } +}; + +export const 
nextLink: OperationURLParameter = { + parameterPath: "nextLink", + mapper: { + serializedName: "nextLink", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; diff --git a/sdk/synapse/synapse-accesscontrol/tsconfig.json b/sdk/synapse/synapse-accesscontrol/tsconfig.json new file mode 100644 index 000000000000..0290d6707a44 --- /dev/null +++ b/sdk/synapse/synapse-accesscontrol/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "es6", + "moduleResolution": "node", + "strict": true, + "target": "es5", + "sourceMap": true, + "declarationMap": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "preserveConstEnums": true, + "lib": ["es6", "dom"], + "declaration": true, + "outDir": "./esm", + "importHelpers": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/sdk/synapse/synapse-artifacts/README.md b/sdk/synapse/synapse-artifacts/README.md new file mode 100644 index 000000000000..165d8e954429 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/README.md @@ -0,0 +1,27 @@ +## Azure ArtifactsClient SDK for JavaScript + +This package contains an isomorphic SDK for ArtifactsClient. + +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +### How to Install + +```bash +npm install @azure/synapse-artifacts +``` + +### How to use + +#### Sample code + +Refer to the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. 
+ +## Related projects + +- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) + + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file diff --git a/sdk/synapse/synapse-artifacts/api-extractor.json b/sdk/synapse/synapse-artifacts/api-extractor.json new file mode 100644 index 000000000000..5fb5b21b56f2 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/api-extractor.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "mainEntryPointFilePath": "./esm/index.d.ts", + "docModel": { "enabled": true }, + "apiReport": { "enabled": true, "reportFolder": "./review" }, + "dtsRollup": { + "enabled": true, + "untrimmedFilePath": "", + "publicTrimmedFilePath": "./esm/index.d.ts" + }, + "messages": { + "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "extractorMessageReporting": { + "ae-missing-release-tag": { "logLevel": "none" }, + "ae-unresolved-link": { "logLevel": "none" } + } + } +} diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json new file mode 100644 index 000000000000..eac600ab121c --- /dev/null +++ b/sdk/synapse/synapse-artifacts/package.json @@ -0,0 +1,53 @@ +{ + "name": "@azure/synapse-artifacts", + "author": "Microsoft Corporation", + "description": "A generated SDK for ArtifactsClient.", + "version": "1.0.0", + "dependencies": { + "@azure/core-lro": "^1.0.2", + "@azure/core-paging": "^1.1.1", + "@azure/core-http": "^1.2.0", + "tslib": "^2.0.0" + }, + "keywords": ["node", "azure", "typescript", "browser", "isomorphic"], + "license": "MIT", + "main": "./dist/synapse-artifacts.js", + "module": "./esm/index.js", + "types": "./esm/index.d.ts", + "devDependencies": { + "typescript": "~3.9.3", + "rollup": "^1.16.3", + "rollup-plugin-node-resolve": "^3.4.0", + "rollup-plugin-sourcemaps": 
"^0.4.2", + "uglify-js": "^3.4.9", + "@microsoft/api-extractor": "7.7.11" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js", + "repository": { + "type": "git", + "url": "https://github.com/Azure/azure-sdk-for-js.git" + }, + "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "esm/**/*.js", + "esm/**/*.js.map", + "esm/**/*.d.ts", + "esm/**/*.d.ts.map", + "src/**/*.ts", + "README.md", + "rollup.config.js", + "tsconfig.json" + ], + "scripts": { + "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-artifacts.js.map'\" -o ./dist/synapse-artifacts.min.js ./dist/synapse-artifacts.js", + "prepack": "npm install && npm run build", + "extract-api": "api-extractor run --local" + }, + "sideEffects": false, + "autoPublish": true +} diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md new file mode 100644 index 000000000000..ae28e08cd661 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md @@ -0,0 +1,6619 @@ +## API Report File for "@azure/synapse-artifacts" + +> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). 
+ +```ts + +import * as coreHttp from '@azure/core-http'; +import { HttpMethods } from '@azure/core-http'; +import { HttpOperationResponse } from '@azure/core-http'; +import { OperationArguments } from '@azure/core-http'; +import { OperationSpec } from '@azure/core-http'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; +import { Poller } from '@azure/core-lro'; +import { PollOperationState } from '@azure/core-lro'; +import { RestResponse } from '@azure/core-http'; + +// @public +export interface Activity { + [property: string]: any; + dependsOn?: ActivityDependency[]; + description?: string; + name: string; + type: "Container" | "Execution" | "Copy" | "HDInsightHive" | "HDInsightPig" | "HDInsightMapReduce" | "HDInsightStreaming" | "HDInsightSpark" | "ExecuteSSISPackage" | "Custom" | "SqlServerStoredProcedure" | "ExecutePipeline" | "Delete" | "AzureDataExplorerCommand" | "Lookup" | "WebActivity" | "GetMetadata" | "IfCondition" | "Switch" | "ForEach" | "AzureMLBatchExecution" | "AzureMLUpdateResource" | "AzureMLExecutePipeline" | "DataLakeAnalyticsU-SQL" | "Wait" | "Until" | "Validation" | "Filter" | "DatabricksNotebook" | "DatabricksSparkJar" | "DatabricksSparkPython" | "SetVariable" | "AppendVariable" | "AzureFunctionActivity" | "WebHook" | "ExecuteDataFlow" | "SynapseNotebook" | "SparkJob" | "SqlPoolStoredProcedure"; + userProperties?: UserProperty[]; +} + +// @public +export interface ActivityDependency { + [property: string]: any; + activity: string; + dependencyConditions: DependencyCondition[]; +} + +// @public +export interface ActivityPolicy { + [property: string]: any; + retry?: any; + retryIntervalInSeconds?: number; + secureInput?: boolean; + secureOutput?: boolean; + timeout?: any; +} + +// @public +export interface ActivityRun { + [property: string]: any; + readonly activityName?: string; + readonly activityRunEnd?: Date; + readonly activityRunId?: string; + readonly activityRunStart?: Date; + readonly activityType?: string; + readonly 
durationInMs?: number; + readonly error?: any; + readonly input?: any; + readonly linkedServiceName?: string; + readonly output?: any; + readonly pipelineName?: string; + readonly pipelineRunId?: string; + readonly status?: string; +} + +// @public +export interface ActivityRunsQueryResponse { + continuationToken?: string; + value: ActivityRun[]; +} + +// @public (undocumented) +export type ActivityUnion = ControlActivity | ExecutionActivityUnion | ExecutePipelineActivity | IfConditionActivity | SwitchActivity | ForEachActivity | WaitActivity | UntilActivity | ValidationActivity | FilterActivity | SetVariableActivity | AppendVariableActivity | WebHookActivity | SynapseNotebookActivity | SynapseSparkJobDefinitionActivity | SqlPoolStoredProcedureActivity; + +// @public +export interface AddDataFlowToDebugSessionResponse { + jobVersion?: string; +} + +// @public +export type AmazonMWSLinkedService = LinkedService & { + endpoint: any; + marketplaceID: any; + sellerID: any; + mwsAuthToken?: SecretBaseUnion; + accessKeyId: any; + secretKey?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type AmazonMWSObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type AmazonMWSSource = TabularSource & { + query?: any; +}; + +// @public +export type AmazonRedshiftLinkedService = LinkedService & { + server: any; + username?: any; + password?: SecretBaseUnion; + database: any; + port?: any; + encryptedCredential?: any; +}; + +// @public +export type AmazonRedshiftSource = TabularSource & { + query?: any; + redshiftUnloadSettings?: RedshiftUnloadSettings; +}; + +// @public +export type AmazonRedshiftTableDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type AmazonS3LinkedService = LinkedService & { + accessKeyId?: any; + secretAccessKey?: SecretBaseUnion; + serviceUrl?: any; + 
encryptedCredential?: any; +}; + +// @public +export type AmazonS3Location = DatasetLocation & { + bucketName?: any; + version?: any; +}; + +// @public +export type AmazonS3ReadSettings = StoreReadSettings & { + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + prefix?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type AppendVariableActivity = Activity & { + variableName?: string; + value?: any; +}; + +// @public +export interface ArtifactRenameRequest { + newName?: string; +} + +// @public (undocumented) +export class ArtifactsClient extends ArtifactsClientContext { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ArtifactsClientOptionalParams); + // Warning: (ae-forgotten-export) The symbol "BigDataPools" needs to be exported by the entry point index.d.ts + // + // (undocumented) + bigDataPools: BigDataPools; + // Warning: (ae-forgotten-export) The symbol "DataFlow" needs to be exported by the entry point index.d.ts + // + // (undocumented) + dataFlow: DataFlow_2; + // Warning: (ae-forgotten-export) The symbol "DataFlowDebugSession" needs to be exported by the entry point index.d.ts + // + // (undocumented) + dataFlowDebugSession: DataFlowDebugSession; + // Warning: (ae-forgotten-export) The symbol "Dataset" needs to be exported by the entry point index.d.ts + // + // (undocumented) + dataset: Dataset_2; + // Warning: (ae-forgotten-export) The symbol "IntegrationRuntimes" needs to be exported by the entry point index.d.ts + // + // (undocumented) + integrationRuntimes: IntegrationRuntimes; + // Warning: (ae-forgotten-export) The symbol "LinkedService" needs to be exported by the entry point index.d.ts + // + // (undocumented) + linkedService: LinkedService_2; + // Warning: (ae-forgotten-export) The symbol "Notebook" needs to be exported by the entry point index.d.ts + // + // (undocumented) 
+ notebook: Notebook_2; + // Warning: (ae-forgotten-export) The symbol "Pipeline" needs to be exported by the entry point index.d.ts + // + // (undocumented) + pipeline: Pipeline; + // Warning: (ae-forgotten-export) The symbol "PipelineRun" needs to be exported by the entry point index.d.ts + // + // (undocumented) + pipelineRun: PipelineRun_2; + // Warning: (ae-forgotten-export) The symbol "SparkJobDefinition" needs to be exported by the entry point index.d.ts + // + // (undocumented) + sparkJobDefinition: SparkJobDefinition_2; + // Warning: (ae-forgotten-export) The symbol "SqlPools" needs to be exported by the entry point index.d.ts + // + // (undocumented) + sqlPools: SqlPools; + // Warning: (ae-forgotten-export) The symbol "SqlScript" needs to be exported by the entry point index.d.ts + // + // (undocumented) + sqlScript: SqlScript_2; + // Warning: (ae-forgotten-export) The symbol "Trigger" needs to be exported by the entry point index.d.ts + // + // (undocumented) + trigger: Trigger_2; + // Warning: (ae-forgotten-export) The symbol "TriggerRun" needs to be exported by the entry point index.d.ts + // + // (undocumented) + triggerRun: TriggerRun_2; + // Warning: (ae-forgotten-export) The symbol "Workspace" needs to be exported by the entry point index.d.ts + // + // (undocumented) + workspace: Workspace_2; + // Warning: (ae-forgotten-export) The symbol "WorkspaceGitRepoManagement" needs to be exported by the entry point index.d.ts + // + // (undocumented) + workspaceGitRepoManagement: WorkspaceGitRepoManagement; +} + +// @public (undocumented) +export class ArtifactsClientContext extends coreHttp.ServiceClient { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ArtifactsClientOptionalParams); + // (undocumented) + apiVersion: string; + // (undocumented) + endpoint: string; +} + +// @public +export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { + apiVersion?: 
string; + endpoint?: string; +} + +// @public +export interface AutoPauseProperties { + delayInMinutes?: number; + enabled?: boolean; +} + +// @public +export interface AutoScaleProperties { + enabled?: boolean; + maxNodeCount?: number; + minNodeCount?: number; +} + +// @public +export type AvroCompressionCodec = string; + +// @public +export type AvroDataset = Dataset & { + location?: DatasetLocationUnion; + avroCompressionCodec?: AvroCompressionCodec; + avroCompressionLevel?: number; +}; + +// @public +export type AvroFormat = DatasetStorageFormat & {}; + +// @public +export type AvroSink = CopySink & { + storeSettings?: StoreWriteSettingsUnion; + formatSettings?: AvroWriteSettings; +}; + +// @public +export type AvroSource = CopySource & { + storeSettings?: StoreReadSettingsUnion; +}; + +// @public +export type AvroWriteSettings = FormatWriteSettings & { + recordName?: string; + recordNamespace?: string; +}; + +// @public +export type AzureBatchLinkedService = LinkedService & { + accountName: any; + accessKey?: SecretBaseUnion; + batchUri: any; + poolName: any; + linkedServiceName: LinkedServiceReference; + encryptedCredential?: any; +}; + +// @public +export type AzureBlobFSLinkedService = LinkedService & { + url: any; + accountKey?: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureBlobFSLocation = DatasetLocation & { + fileSystem?: any; +}; + +// @public +export type AzureBlobFSReadSettings = StoreReadSettings & { + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type AzureBlobFSSink = CopySink & { + copyBehavior?: any; +}; + +// @public +export type AzureBlobFSSource = CopySource & { + treatEmptyAsNull?: any; + skipHeaderLineCount?: any; + recursive?: any; +}; + +// @public +export type AzureBlobFSWriteSettings 
= StoreWriteSettings & { + blockSizeInMB?: any; +}; + +// @public +export type AzureBlobStorageLinkedService = LinkedService & { + connectionString?: any; + accountKey?: AzureKeyVaultSecretReference; + sasUri?: any; + sasToken?: AzureKeyVaultSecretReference; + serviceEndpoint?: string; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: string; +}; + +// @public +export type AzureBlobStorageLocation = DatasetLocation & { + container?: any; +}; + +// @public +export type AzureBlobStorageReadSettings = StoreReadSettings & { + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + prefix?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type AzureBlobStorageWriteSettings = StoreWriteSettings & { + blockSizeInMB?: any; +}; + +// @public +export type AzureDatabricksLinkedService = LinkedService & { + domain: any; + accessToken: SecretBaseUnion; + existingClusterId?: any; + instancePoolId?: any; + newClusterVersion?: any; + newClusterNumOfWorker?: any; + newClusterNodeType?: any; + newClusterSparkConf?: { + [propertyName: string]: any; + }; + newClusterSparkEnvVars?: { + [propertyName: string]: any; + }; + newClusterCustomTags?: { + [propertyName: string]: any; + }; + newClusterDriverNodeType?: any; + newClusterInitScripts?: any; + newClusterEnableElasticDisk?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureDataExplorerCommandActivity = ExecutionActivity & { + command: any; + commandTimeout?: any; +}; + +// @public +export type AzureDataExplorerLinkedService = LinkedService & { + endpoint: any; + servicePrincipalId: any; + servicePrincipalKey: SecretBaseUnion; + database: any; + tenant: any; +}; + +// @public +export type AzureDataExplorerSink = CopySink & { + ingestionMappingName?: any; + ingestionMappingAsJson?: any; + flushImmediately?: any; +}; + +// @public +export type 
AzureDataExplorerSource = CopySource & { + query: any; + noTruncation?: any; + queryTimeout?: any; +}; + +// @public +export type AzureDataExplorerTableDataset = Dataset & { + table?: any; +}; + +// @public +export type AzureDataLakeAnalyticsLinkedService = LinkedService & { + accountName: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant: any; + subscriptionId?: any; + resourceGroupName?: any; + dataLakeAnalyticsUri?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureDataLakeStoreLinkedService = LinkedService & { + dataLakeStoreUri: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + accountName?: any; + subscriptionId?: any; + resourceGroupName?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureDataLakeStoreLocation = DatasetLocation & {}; + +// @public +export type AzureDataLakeStoreReadSettings = StoreReadSettings & { + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type AzureDataLakeStoreSink = CopySink & { + copyBehavior?: any; + enableAdlsSingleFileParallel?: any; +}; + +// @public +export type AzureDataLakeStoreSource = CopySource & { + recursive?: any; +}; + +// @public +export type AzureDataLakeStoreWriteSettings = StoreWriteSettings & {}; + +// @public +export type AzureEntityResource = Resource & { + readonly etag?: string; +}; + +// @public +export type AzureFileStorageLinkedService = LinkedService & { + host: any; + userId?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type AzureFileStorageLocation = DatasetLocation & {}; + +// @public +export type AzureFileStorageReadSettings = StoreReadSettings & { + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + 
modifiedDatetimeEnd?: any; +}; + +// @public +export type AzureFunctionActivity = ExecutionActivity & { + method: AzureFunctionActivityMethod; + functionName: any; + headers?: any; + body?: any; +}; + +// @public +export type AzureFunctionActivityMethod = string; + +// @public +export type AzureFunctionLinkedService = LinkedService & { + functionAppUrl: any; + functionKey?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type AzureKeyVaultLinkedService = LinkedService & { + baseUrl: any; +}; + +// @public +export type AzureKeyVaultSecretReference = SecretBase & { + store: LinkedServiceReference; + secretName: any; + secretVersion?: any; +}; + +// @public +export type AzureMariaDBLinkedService = LinkedService & { + connectionString?: any; + pwd?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type AzureMariaDBSource = TabularSource & { + query?: any; +}; + +// @public +export type AzureMariaDBTableDataset = Dataset & { + tableName?: any; +}; + +// @public +export type AzureMLBatchExecutionActivity = ExecutionActivity & { + globalParameters?: { + [propertyName: string]: any; + }; + webServiceOutputs?: { + [propertyName: string]: AzureMLWebServiceFile; + }; + webServiceInputs?: { + [propertyName: string]: AzureMLWebServiceFile; + }; +}; + +// @public +export type AzureMLExecutePipelineActivity = ExecutionActivity & { + mlPipelineId: any; + experimentName?: any; + mlPipelineParameters?: any; + mlParentRunId?: any; + continueOnStepFailure?: any; +}; + +// @public +export type AzureMLLinkedService = LinkedService & { + mlEndpoint: any; + apiKey: SecretBaseUnion; + updateResourceEndpoint?: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureMLServiceLinkedService = LinkedService & { + subscriptionId: any; + resourceGroupName: any; + mlWorkspaceName: any; + servicePrincipalId?: any; + servicePrincipalKey?: 
SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureMLUpdateResourceActivity = ExecutionActivity & { + trainedModelName: any; + trainedModelLinkedServiceName: LinkedServiceReference; + trainedModelFilePath: any; +}; + +// @public +export interface AzureMLWebServiceFile { + filePath: any; + linkedServiceName: LinkedServiceReference; +} + +// @public +export type AzureMySqlLinkedService = LinkedService & { + connectionString: any; + password?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type AzureMySqlSink = CopySink & { + preCopyScript?: any; +}; + +// @public +export type AzureMySqlSource = TabularSource & { + query?: any; +}; + +// @public +export type AzureMySqlTableDataset = Dataset & { + tableName?: any; + table?: any; +}; + +// @public +export type AzurePostgreSqlLinkedService = LinkedService & { + connectionString?: any; + password?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type AzurePostgreSqlSink = CopySink & { + preCopyScript?: any; +}; + +// @public +export type AzurePostgreSqlSource = TabularSource & { + query?: any; +}; + +// @public +export type AzurePostgreSqlTableDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type AzureQueueSink = CopySink & {}; + +// @public +export type AzureSearchIndexDataset = Dataset & { + indexName: any; +}; + +// @public +export type AzureSearchIndexSink = CopySink & { + writeBehavior?: AzureSearchIndexWriteBehaviorType; +}; + +// @public +export type AzureSearchIndexWriteBehaviorType = string; + +// @public +export type AzureSearchLinkedService = LinkedService & { + url: any; + key?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type AzureSqlDatabaseLinkedService = LinkedService & { + connectionString: any; + password?: AzureKeyVaultSecretReference; + servicePrincipalId?: any; + servicePrincipalKey?: 
SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureSqlDWLinkedService = LinkedService & { + connectionString: any; + password?: AzureKeyVaultSecretReference; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureSqlDWTableDataset = Dataset & { + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type AzureSqlMILinkedService = LinkedService & { + connectionString: any; + password?: AzureKeyVaultSecretReference; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureSqlMITableDataset = Dataset & { + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type AzureSqlSink = CopySink & { + sqlWriterStoredProcedureName?: any; + sqlWriterTableType?: any; + preCopyScript?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + storedProcedureTableTypeParameterName?: any; + tableOption?: any; +}; + +// @public +export type AzureSqlSource = TabularSource & { + sqlReaderQuery?: any; + sqlReaderStoredProcedureName?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + produceAdditionalTypes?: any; +}; + +// @public +export type AzureSqlTableDataset = Dataset & { + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type AzureStorageLinkedService = LinkedService & { + connectionString?: any; + accountKey?: AzureKeyVaultSecretReference; + sasUri?: any; + sasToken?: AzureKeyVaultSecretReference; + encryptedCredential?: string; +}; + +// @public +export type AzureTableDataset = Dataset & { + tableName: any; +}; + +// @public +export type AzureTableSink = CopySink & { + azureTableDefaultPartitionKeyValue?: any; + azureTablePartitionKeyName?: 
any; + azureTableRowKeyName?: any; + azureTableInsertType?: any; +}; + +// @public +export type AzureTableSource = TabularSource & { + azureTableSourceQuery?: any; + azureTableSourceIgnoreTableNotFound?: any; +}; + +// @public +export type AzureTableStorageLinkedService = LinkedService & { + connectionString?: any; + accountKey?: AzureKeyVaultSecretReference; + sasUri?: any; + sasToken?: AzureKeyVaultSecretReference; + encryptedCredential?: string; +}; + +// @public +export interface BigDataPoolReference { + referenceName: string; + type: BigDataPoolReferenceType; +} + +// @public +export type BigDataPoolReferenceType = string; + +// @public +export type BigDataPoolResourceInfo = TrackedResource & { + provisioningState?: string; + autoScale?: AutoScaleProperties; + creationDate?: Date; + autoPause?: AutoPauseProperties; + isComputeIsolationEnabled?: boolean; + haveLibraryRequirementsChanged?: boolean; + sessionLevelPackagesEnabled?: boolean; + sparkEventsFolder?: string; + nodeCount?: number; + libraryRequirements?: LibraryRequirements; + sparkConfigProperties?: LibraryRequirements; + sparkVersion?: string; + defaultSparkLogFolder?: string; + nodeSize?: NodeSize; + nodeSizeFamily?: NodeSizeFamily; +}; + +// @public +export interface BigDataPoolResourceInfoListResult { + nextLink?: string; + value?: BigDataPoolResourceInfo[]; +} + +// @public +export type BigDataPoolsGetResponse = BigDataPoolResourceInfo & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: BigDataPoolResourceInfo; + }; +}; + +// @public +export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: BigDataPoolResourceInfoListResult; + }; +}; + +// @public +export type BinaryDataset = Dataset & { + location?: DatasetLocationUnion; + compression?: DatasetCompressionUnion; +}; + +// @public +export type BinarySink = CopySink & { + storeSettings?: StoreWriteSettingsUnion; +}; + +// @public 
+export type BinarySource = CopySource & { + storeSettings?: StoreReadSettingsUnion; +}; + +// @public +export type BlobEventsTrigger = MultiplePipelineTrigger & { + blobPathBeginsWith?: string; + blobPathEndsWith?: string; + ignoreEmptyBlobs?: boolean; + events: BlobEventTypes[]; + scope: string; +}; + +// @public +export type BlobEventTypes = string; + +// @public +export type BlobSink = CopySink & { + blobWriterOverwriteFiles?: any; + blobWriterDateTimeFormat?: any; + blobWriterAddHeader?: any; + copyBehavior?: any; +}; + +// @public +export type BlobSource = CopySource & { + treatEmptyAsNull?: any; + skipHeaderLineCount?: any; + recursive?: any; +}; + +// @public +export type BlobTrigger = MultiplePipelineTrigger & { + folderPath: string; + maxConcurrency: number; + linkedService: LinkedServiceReference; +}; + +// @public +export type CassandraLinkedService = LinkedService & { + host: any; + authenticationType?: any; + port?: any; + username?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type CassandraSource = TabularSource & { + query?: any; + consistencyLevel?: CassandraSourceReadConsistencyLevels; +}; + +// @public +export type CassandraSourceReadConsistencyLevels = string; + +// @public +export type CassandraTableDataset = Dataset & { + tableName?: any; + keyspace?: any; +}; + +// @public +export type CellOutputType = string; + +// @public +export type ChainingTrigger = Trigger & { + pipeline: TriggerPipelineReference; + dependsOn: PipelineReference[]; + runDimension: string; +}; + +// @public +export interface CloudError { + code: string; + details?: CloudError[]; + message: string; + target?: string; +} + +// @public +export type CommonDataServiceForAppsEntityDataset = Dataset & { + entityName?: any; +}; + +// @public +export type CommonDataServiceForAppsLinkedService = LinkedService & { + deploymentType: DynamicsDeploymentType; + hostName?: any; + port?: any; + serviceUri?: any; + organizationName?: any; + 
authenticationType: DynamicsAuthenticationType; + username?: any; + password?: SecretBaseUnion; + servicePrincipalId?: any; + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + servicePrincipalCredential?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type CommonDataServiceForAppsSink = CopySink & { + writeBehavior: DynamicsSinkWriteBehavior; + ignoreNullValues?: any; + alternateKeyName?: any; +}; + +// @public +export type CommonDataServiceForAppsSource = CopySource & { + query?: any; +}; + +// @public +export type ConcurLinkedService = LinkedService & { + clientId: any; + username: any; + password?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type ConcurObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type ConcurSource = TabularSource & { + query?: any; +}; + +// @public +export type ControlActivity = Activity & {}; + +// @public +export type CopyActivity = ExecutionActivity & { + inputs?: DatasetReference[]; + outputs?: DatasetReference[]; + source: CopySourceUnion; + sink: CopySinkUnion; + translator?: any; + enableStaging?: any; + stagingSettings?: StagingSettings; + parallelCopies?: any; + dataIntegrationUnits?: any; + enableSkipIncompatibleRow?: any; + redirectIncompatibleRowSettings?: RedirectIncompatibleRowSettings; + preserveRules?: any[]; + preserve?: any[]; +}; + +// @public +export type CopyBehaviorType = string; + +// @public +export interface CopySink { + [property: string]: any; + maxConcurrentConnections?: any; + sinkRetryCount?: any; + sinkRetryWait?: any; + type: "DelimitedTextSink" | "JsonSink" | "OrcSink" | "AzurePostgreSqlSink" | "AzureMySqlSink" | "SapCloudForCustomerSink" | "AzureQueueSink" | "AzureTableSink" | "AvroSink" | "ParquetSink" | "BinarySink" | "BlobSink" | "FileSystemSink" | "DocumentDbCollectionSink" | "CosmosDbSqlApiSink" | "SqlSink" | 
"SqlServerSink" | "AzureSqlSink" | "SqlMISink" | "SqlDWSink" | "OracleSink" | "AzureDataLakeStoreSink" | "AzureBlobFSSink" | "AzureSearchIndexSink" | "OdbcSink" | "InformixSink" | "MicrosoftAccessSink" | "DynamicsSink" | "DynamicsCrmSink" | "CommonDataServiceForAppsSink" | "AzureDataExplorerSink" | "SalesforceSink" | "SalesforceServiceCloudSink" | "CosmosDbMongoDbApiSink"; + writeBatchSize?: any; + writeBatchTimeout?: any; +} + +// @public (undocumented) +export type CopySinkUnion = DelimitedTextSink | JsonSink | OrcSink | AzurePostgreSqlSink | AzureMySqlSink | SapCloudForCustomerSink | AzureQueueSink | AzureTableSink | AvroSink | ParquetSink | BinarySink | BlobSink | FileSystemSink | DocumentDbCollectionSink | CosmosDbSqlApiSink | SqlSink | SqlServerSink | AzureSqlSink | SqlMISink | SqlDWSink | OracleSink | AzureDataLakeStoreSink | AzureBlobFSSink | AzureSearchIndexSink | OdbcSink | InformixSink | MicrosoftAccessSink | DynamicsSink | DynamicsCrmSink | CommonDataServiceForAppsSink | AzureDataExplorerSink | SalesforceSink | SalesforceServiceCloudSink | CosmosDbMongoDbApiSink; + +// @public +export interface CopySource { + [property: string]: any; + maxConcurrentConnections?: any; + sourceRetryCount?: any; + sourceRetryWait?: any; + type: "AvroSource" | "ParquetSource" | "DelimitedTextSource" | "JsonSource" | "OrcSource" | "BinarySource" | "TabularSource" | "AzureTableSource" | "BlobSource" | "DocumentDbCollectionSource" | "CosmosDbSqlApiSource" | "DynamicsSource" | "DynamicsCrmSource" | "CommonDataServiceForAppsSource" | "RelationalSource" | "InformixSource" | "MicrosoftAccessSource" | "Db2Source" | "OdbcSource" | "MySqlSource" | "PostgreSqlSource" | "SybaseSource" | "SapBwSource" | "ODataSource" | "SalesforceSource" | "SalesforceServiceCloudSource" | "SapCloudForCustomerSource" | "SapEccSource" | "SapHanaSource" | "SapOpenHubSource" | "SapTableSource" | "RestSource" | "SqlSource" | "SqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" | 
"FileSystemSource" | "HdfsSource" | "AzureMySqlSource" | "AzureDataExplorerSource" | "OracleSource" | "TeradataSource" | "WebSource" | "CassandraSource" | "MongoDbSource" | "MongoDbV2Source" | "CosmosDbMongoDbApiSource" | "Office365Source" | "AzureDataLakeStoreSource" | "AzureBlobFSSource" | "HttpSource" | "AmazonMWSSource" | "AzurePostgreSqlSource" | "ConcurSource" | "CouchbaseSource" | "DrillSource" | "EloquaSource" | "GoogleBigQuerySource" | "GreenplumSource" | "HBaseSource" | "HiveSource" | "HubspotSource" | "ImpalaSource" | "JiraSource" | "MagentoSource" | "MariaDBSource" | "AzureMariaDBSource" | "MarketoSource" | "PaypalSource" | "PhoenixSource" | "PrestoSource" | "QuickBooksSource" | "ServiceNowSource" | "ShopifySource" | "SparkSource" | "SquareSource" | "XeroSource" | "ZohoSource" | "NetezzaSource" | "VerticaSource" | "SalesforceMarketingCloudSource" | "ResponsysSource" | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" | "AmazonRedshiftSource"; +} + +// @public (undocumented) +export type CopySourceUnion = AvroSource | ParquetSource | DelimitedTextSource | JsonSource | OrcSource | BinarySource | TabularSourceUnion | BlobSource | DocumentDbCollectionSource | CosmosDbSqlApiSource | DynamicsSource | DynamicsCrmSource | CommonDataServiceForAppsSource | RelationalSource | MicrosoftAccessSource | ODataSource | SalesforceServiceCloudSource | RestSource | FileSystemSource | HdfsSource | AzureDataExplorerSource | OracleSource | WebSource | MongoDbSource | MongoDbV2Source | CosmosDbMongoDbApiSource | Office365Source | AzureDataLakeStoreSource | AzureBlobFSSource | HttpSource; + +// @public +export interface CopyTranslator { + [property: string]: any; + type: "TabularTranslator"; +} + +// @public (undocumented) +export type CopyTranslatorUnion = TabularTranslator; + +// @public +export type CosmosDbLinkedService = LinkedService & { + connectionString?: any; + accountEndpoint?: any; + database?: any; + accountKey?: SecretBaseUnion; + 
encryptedCredential?: any; +}; + +// @public +export type CosmosDbMongoDbApiCollectionDataset = Dataset & { + collection: any; +}; + +// @public +export type CosmosDbMongoDbApiLinkedService = LinkedService & { + connectionString: any; + database: any; +}; + +// @public +export type CosmosDbMongoDbApiSink = CopySink & { + writeBehavior?: any; +}; + +// @public +export type CosmosDbMongoDbApiSource = CopySource & { + filter?: any; + cursorMethods?: MongoDbCursorMethodsProperties; + batchSize?: any; + queryTimeout?: any; +}; + +// @public +export type CosmosDbSqlApiCollectionDataset = Dataset & { + collectionName: any; +}; + +// @public +export type CosmosDbSqlApiSink = CopySink & { + writeBehavior?: any; +}; + +// @public +export type CosmosDbSqlApiSource = CopySource & { + query?: any; + pageSize?: any; + preferredRegions?: any; +}; + +// @public +export type CouchbaseLinkedService = LinkedService & { + connectionString?: any; + credString?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type CouchbaseSource = TabularSource & { + query?: any; +}; + +// @public +export type CouchbaseTableDataset = Dataset & { + tableName?: any; +}; + +// @public +export interface CreateDataFlowDebugSessionRequest { + clusterTimeout?: number; + dataBricksLinkedService?: LinkedServiceResource; + dataFlowName?: string; + existingClusterId?: string; + newClusterName?: string; + newClusterNodeType?: string; +} + +// @public +export interface CreateDataFlowDebugSessionResponse { + sessionId?: string; +} + +// @public +export interface CreateRunResponse { + runId: string; +} + +// @public +export type CustomActivity = ExecutionActivity & { + command: any; + resourceLinkedService?: LinkedServiceReference; + folderPath?: any; + referenceObjects?: CustomActivityReferenceObject; + extendedProperties?: { + [propertyName: string]: any; + }; + retentionTimeInDays?: any; +}; + +// @public +export interface CustomActivityReferenceObject { + datasets?: 
DatasetReference[]; + linkedServices?: LinkedServiceReference[]; +} + +// @public +export type CustomDataset = Dataset & { + typeProperties?: any; +}; + +// @public +export type CustomDataSourceLinkedService = LinkedService & { + typeProperties: any; +}; + +// @public +export interface CustomerManagedKeyDetails { + key?: WorkspaceKeyDetails; + readonly status?: string; +} + +// @public +export interface CustomSetupBase { + type: string; +} + +// @public +export type DatabricksNotebookActivity = ExecutionActivity & { + notebookPath: any; + baseParameters?: { + [propertyName: string]: any; + }; + libraries?: { + [propertyName: string]: any; + }[]; +}; + +// @public +export type DatabricksSparkJarActivity = ExecutionActivity & { + mainClassName: any; + parameters?: any[]; + libraries?: { + [propertyName: string]: any; + }[]; +}; + +// @public +export type DatabricksSparkPythonActivity = ExecutionActivity & { + pythonFile: any; + parameters?: any[]; + libraries?: { + [propertyName: string]: any; + }[]; +}; + +// @public +export interface DataFlow { + annotations?: any[]; + description?: string; + folder?: DataFlowFolder; + type: "MappingDataFlow"; +} + +// @public +export type DataFlowComputeType = string; + +// @public +export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowResource; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface DataFlowDebugCommandRequest { + commandName?: string; + commandPayload: any; + dataFlowName?: string; + sessionId: string; +} + +// @public +export interface DataFlowDebugCommandResponse { + data?: string; + status?: string; +} + +// @public +export interface DataFlowDebugPackage { + [property: string]: any; + dataFlow?: DataFlowDebugResource; + datasets?: DatasetDebugResource[]; + 
debugSettings?: DataFlowDebugPackageDebugSettings; + linkedServices?: LinkedServiceDebugResource[]; + sessionId?: string; + staging?: DataFlowStagingInfo; +} + +// @public +export interface DataFlowDebugPackageDebugSettings { + datasetParameters?: any; + parameters?: { + [propertyName: string]: any; + }; + sourceSettings?: DataFlowSourceSetting[]; +} + +// @public +export interface DataFlowDebugPreviewDataRequest { + dataFlowName?: string; + rowLimits?: number; + sessionId?: string; + streamName?: string; +} + +// @public +export interface DataFlowDebugQueryResponse { + runId?: string; +} + +// @public +export type DataFlowDebugResource = SubResourceDebugResource & { + properties: DataFlowUnion; +}; + +// @public +export interface DataFlowDebugResultResponse { + data?: string; + status?: string; +} + +// @public +export type DataFlowDebugSessionAddDataFlowResponse = AddDataFlowToDebugSessionResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: AddDataFlowToDebugSessionResponse; + }; +}; + +// @public +export interface DataFlowDebugSessionCreateDataFlowDebugSessionHeaders { + location?: string; +} + +// @public +export type DataFlowDebugSessionCreateDataFlowDebugSessionResponse = CreateDataFlowDebugSessionResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: CreateDataFlowDebugSessionResponse; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface DataFlowDebugSessionExecuteCommandHeaders { + location?: string; +} + +// @public +export type DataFlowDebugSessionExecuteCommandResponse = DataFlowDebugCommandResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowDebugCommandResponse; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface DataFlowDebugSessionInfo { + [property: string]: any; + computeType?: string; + coreCount?: number; + dataFlowName?: string; + integrationRuntimeName?: string; + lastActivityTime?: string; + 
nodeCount?: number; + sessionId?: string; + startTime?: string; + timeToLiveInMinutes?: number; +} + +// @public +export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: QueryDataFlowDebugSessionsResponse; + }; +}; + +// @public +export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse = QueryDataFlowDebugSessionsResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: QueryDataFlowDebugSessionsResponse; + }; +}; + +// @public +export interface DataFlowDebugStatisticsRequest { + columns?: string[]; + dataFlowName?: string; + sessionId?: string; + streamName?: string; +} + +// @public +export interface DataFlowFolder { + name?: string; +} + +// @public +export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type DataFlowGetDataFlowResponse = DataFlowResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowResource; + }; +}; + +// @public +export type DataFlowGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowListResponse; + }; +}; + +// @public +export type DataFlowGetDataFlowsByWorkspaceResponse = DataFlowListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowListResponse; + }; +}; + +// @public +export interface DataFlowListResponse { + nextLink?: string; + value: DataFlowResource[]; +} + +// @public +export interface DataFlowReference { + [property: string]: any; + datasetParameters?: any; + referenceName: string; + type: DataFlowReferenceType; +} + +// @public +export type DataFlowReferenceType = string; + +// @public +export type DataFlowResource = AzureEntityResource & { + properties: DataFlowUnion; +}; + +// 
@public +export type DataFlowSink = Transformation & { + dataset?: DatasetReference; +}; + +// @public +export type DataFlowSource = Transformation & { + dataset?: DatasetReference; +}; + +// @public +export interface DataFlowSourceSetting { + [property: string]: any; + rowLimit?: number; + sourceName?: string; +} + +// @public +export interface DataFlowStagingInfo { + folderPath?: string; + linkedService?: LinkedServiceReference; +} + +// @public (undocumented) +export type DataFlowUnion = MappingDataFlow; + +// @public +export type DataLakeAnalyticsUsqlActivity = ExecutionActivity & { + scriptPath: any; + scriptLinkedService: LinkedServiceReference; + degreeOfParallelism?: any; + priority?: any; + parameters?: { + [propertyName: string]: any; + }; + runtimeVersion?: any; + compilationMode?: any; +}; + +// @public +export interface DataLakeStorageAccountDetails { + accountUrl?: string; + filesystem?: string; +} + +// @public +export interface Dataset { + [property: string]: any; + annotations?: any[]; + description?: string; + folder?: DatasetFolder; + linkedServiceName: LinkedServiceReference; + parameters?: { + [propertyName: string]: ParameterSpecification; + }; + schema?: any; + structure?: any; + type: "Avro" | "Parquet" | "DelimitedText" | "Json" | "Orc" | "Binary" | "AzureTable" | "AzureSqlTable" | "AzureSqlMITable" | "AzureSqlDWTable" | "CassandraTable" | "CustomDataset" | "CosmosDbSqlApiCollection" | "DocumentDbCollection" | "DynamicsEntity" | "DynamicsCrmEntity" | "CommonDataServiceForAppsEntity" | "Office365Table" | "MongoDbCollection" | "MongoDbV2Collection" | "CosmosDbMongoDbApiCollection" | "ODataResource" | "OracleTable" | "TeradataTable" | "AzureMySqlTable" | "AmazonRedshiftTable" | "Db2Table" | "RelationalTable" | "InformixTable" | "OdbcTable" | "MySqlTable" | "PostgreSqlTable" | "MicrosoftAccessTable" | "SalesforceObject" | "SalesforceServiceCloudObject" | "SybaseTable" | "SapBwCube" | "SapCloudForCustomerResource" | "SapEccResource" | 
"SapHanaTable" | "SapOpenHubTable" | "SqlServerTable" | "RestResource" | "SapTableResource" | "WebTable" | "AzureSearchIndex" | "AmazonMWSObject" | "AzurePostgreSqlTable" | "ConcurObject" | "CouchbaseTable" | "DrillTable" | "EloquaObject" | "GoogleBigQueryObject" | "GreenplumTable" | "HBaseObject" | "HiveObject" | "HubspotObject" | "ImpalaObject" | "JiraObject" | "MagentoObject" | "MariaDBTable" | "AzureMariaDBTable" | "MarketoObject" | "PaypalObject" | "PhoenixObject" | "PrestoObject" | "QuickBooksObject" | "ServiceNowObject" | "ShopifyObject" | "SparkObject" | "SquareObject" | "XeroObject" | "ZohoObject" | "NetezzaTable" | "VerticaTable" | "SalesforceMarketingCloudObject" | "ResponsysObject" | "DynamicsAXResource" | "OracleServiceCloudObject" | "AzureDataExplorerTable" | "GoogleAdWordsObject"; +} + +// @public +export type DatasetBZip2Compression = DatasetCompression & {}; + +// @public +export interface DatasetCompression { + [property: string]: any; + type: "BZip2" | "GZip" | "Deflate" | "ZipDeflate"; +} + +// @public +export type DatasetCompressionLevel = string; + +// @public (undocumented) +export type DatasetCompressionUnion = DatasetBZip2Compression | DatasetGZipCompression | DatasetDeflateCompression | DatasetZipDeflateCompression; + +// @public +export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DatasetResource; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface DatasetDataElement { + name?: any; + type?: any; +} + +// @public +export type DatasetDebugResource = SubResourceDebugResource & { + properties: DatasetUnion; +}; + +// @public +export type DatasetDeflateCompression = DatasetCompression & { + level?: DatasetCompressionLevel; +}; + +// @public +export interface DatasetFolder { + name?: string; +} + +// 
@public +export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type DatasetGetDatasetResponse = DatasetResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DatasetResource; + }; +}; + +// @public +export type DatasetGetDatasetsByWorkspaceNextResponse = DatasetListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DatasetListResponse; + }; +}; + +// @public +export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DatasetListResponse; + }; +}; + +// @public +export type DatasetGZipCompression = DatasetCompression & { + level?: DatasetCompressionLevel; +}; + +// @public +export interface DatasetListResponse { + nextLink?: string; + value: DatasetResource[]; +} + +// @public +export interface DatasetLocation { + [property: string]: any; + fileName?: any; + folderPath?: any; + type: "AzureBlobStorageLocation" | "AzureBlobFSLocation" | "AzureDataLakeStoreLocation" | "AmazonS3Location" | "FileServerLocation" | "AzureFileStorageLocation" | "GoogleCloudStorageLocation" | "FtpServerLocation" | "SftpLocation" | "HttpServerLocation" | "HdfsLocation"; +} + +// @public (undocumented) +export type DatasetLocationUnion = AzureBlobStorageLocation | AzureBlobFSLocation | AzureDataLakeStoreLocation | AmazonS3Location | FileServerLocation | AzureFileStorageLocation | GoogleCloudStorageLocation | FtpServerLocation | SftpLocation | HttpServerLocation | HdfsLocation; + +// @public +export interface DatasetReference { + parameters?: { + [propertyName: string]: any; + }; + referenceName: string; + type: DatasetReferenceType; +} + +// @public +export type DatasetReferenceType = string; + +// @public +export type DatasetResource = AzureEntityResource & { + properties: DatasetUnion; +}; + +// @public +export interface DatasetSchemaDataElement { + 
[property: string]: any; + name?: any; + type?: any; +} + +// @public +export interface DatasetStorageFormat { + [property: string]: any; + deserializer?: any; + serializer?: any; + type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; +} + +// @public (undocumented) +export type DatasetStorageFormatUnion = TextFormat | JsonFormat | AvroFormat | OrcFormat | ParquetFormat; + +// @public (undocumented) +export type DatasetUnion = AvroDataset | ParquetDataset | DelimitedTextDataset | JsonDataset | OrcDataset | BinaryDataset | AzureTableDataset | AzureSqlTableDataset | AzureSqlMITableDataset | AzureSqlDWTableDataset | CassandraTableDataset | CustomDataset | CosmosDbSqlApiCollectionDataset | DocumentDbCollectionDataset | DynamicsEntityDataset | DynamicsCrmEntityDataset | CommonDataServiceForAppsEntityDataset | Office365Dataset | MongoDbCollectionDataset | MongoDbV2CollectionDataset | CosmosDbMongoDbApiCollectionDataset | ODataResourceDataset | OracleTableDataset | TeradataTableDataset | AzureMySqlTableDataset | AmazonRedshiftTableDataset | Db2TableDataset | RelationalTableDataset | InformixTableDataset | OdbcTableDataset | MySqlTableDataset | PostgreSqlTableDataset | MicrosoftAccessTableDataset | SalesforceObjectDataset | SalesforceServiceCloudObjectDataset | SybaseTableDataset | SapBwCubeDataset | SapCloudForCustomerResourceDataset | SapEccResourceDataset | SapHanaTableDataset | SapOpenHubTableDataset | SqlServerTableDataset | RestResourceDataset | SapTableResourceDataset | WebTableDataset | AzureSearchIndexDataset | AmazonMWSObjectDataset | AzurePostgreSqlTableDataset | ConcurObjectDataset | CouchbaseTableDataset | DrillTableDataset | EloquaObjectDataset | GoogleBigQueryObjectDataset | GreenplumTableDataset | HBaseObjectDataset | HiveObjectDataset | HubspotObjectDataset | ImpalaObjectDataset | JiraObjectDataset | MagentoObjectDataset | MariaDBTableDataset | AzureMariaDBTableDataset | MarketoObjectDataset | PaypalObjectDataset | 
PhoenixObjectDataset | PrestoObjectDataset | QuickBooksObjectDataset | ServiceNowObjectDataset | ShopifyObjectDataset | SparkObjectDataset | SquareObjectDataset | XeroObjectDataset | ZohoObjectDataset | NetezzaTableDataset | VerticaTableDataset | SalesforceMarketingCloudObjectDataset | ResponsysObjectDataset | DynamicsAXResourceDataset | OracleServiceCloudObjectDataset | AzureDataExplorerTableDataset | GoogleAdWordsObjectDataset; + +// @public +export type DatasetZipDeflateCompression = DatasetCompression & { + level?: DatasetCompressionLevel; +}; + +// @public +export type DayOfWeek = "Sunday" | "Monday" | "Tuesday" | "Wednesday" | "Thursday" | "Friday" | "Saturday"; + +// @public +export type Db2AuthenticationType = string; + +// @public +export type Db2LinkedService = LinkedService & { + server: any; + database: any; + authenticationType?: Db2AuthenticationType; + username?: any; + password?: SecretBaseUnion; + packageCollection?: any; + certificateCommonName?: any; + encryptedCredential?: any; +}; + +// @public +export type Db2Source = TabularSource & { + query?: any; +}; + +// @public +export type Db2TableDataset = Dataset & { + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type DeleteActivity = ExecutionActivity & { + recursive?: any; + maxConcurrentConnections?: number; + enableLogging?: any; + logStorageSettings?: LogStorageSettings; + dataset: DatasetReference; +}; + +// @public +export interface DeleteDataFlowDebugSessionRequest { + dataFlowName?: string; + sessionId?: string; +} + +// @public +export type DelimitedTextCompressionCodec = string; + +// @public +export type DelimitedTextDataset = Dataset & { + location?: DatasetLocationUnion; + columnDelimiter?: any; + rowDelimiter?: any; + encodingName?: any; + compressionCodec?: DelimitedTextCompressionCodec; + compressionLevel?: DatasetCompressionLevel; + quoteChar?: any; + escapeChar?: any; + firstRowAsHeader?: any; + nullValue?: any; +}; + +// @public 
+export type DelimitedTextReadSettings = FormatReadSettings & { + skipLineCount?: any; +}; + +// @public +export type DelimitedTextSink = CopySink & { + storeSettings?: StoreWriteSettingsUnion; + formatSettings?: DelimitedTextWriteSettings; +}; + +// @public +export type DelimitedTextSource = CopySource & { + storeSettings?: StoreReadSettingsUnion; + formatSettings?: DelimitedTextReadSettings; +}; + +// @public +export type DelimitedTextWriteSettings = FormatWriteSettings & { + quoteAllText?: any; + fileExtension: any; +}; + +// @public +export type DependencyCondition = string; + +// @public +export interface DependencyReference { + type: "TriggerDependencyReference" | "TumblingWindowTriggerDependencyReference" | "SelfDependencyTumblingWindowTriggerReference"; +} + +// @public (undocumented) +export type DependencyReferenceUnion = TriggerDependencyReferenceUnion | SelfDependencyTumblingWindowTriggerReference; + +// @public +export interface DistcpSettings { + distcpOptions?: any; + resourceManagerEndpoint: any; + tempScriptPath: any; +} + +// @public +export type DocumentDbCollectionDataset = Dataset & { + collectionName: any; +}; + +// @public +export type DocumentDbCollectionSink = CopySink & { + nestingSeparator?: any; + writeBehavior?: any; +}; + +// @public +export type DocumentDbCollectionSource = CopySource & { + query?: any; + nestingSeparator?: any; + queryTimeout?: any; +}; + +// @public +export type DrillLinkedService = LinkedService & { + connectionString?: any; + pwd?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type DrillSource = TabularSource & { + query?: any; +}; + +// @public +export type DrillTableDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export interface DWCopyCommandDefaultValue { + columnName?: any; + defaultValue?: any; +} + +// @public +export interface DWCopyCommandSettings { + additionalOptions?: { + [propertyName: string]: 
string; + }; + defaultValues?: DWCopyCommandDefaultValue[]; +} + +// @public +export type DynamicsAuthenticationType = string; + +// @public +export type DynamicsAXLinkedService = LinkedService & { + url: any; + servicePrincipalId: any; + servicePrincipalKey: SecretBaseUnion; + tenant: any; + aadResourceId: any; + encryptedCredential?: any; +}; + +// @public +export type DynamicsAXResourceDataset = Dataset & { + path: any; +}; + +// @public +export type DynamicsAXSource = TabularSource & { + query?: any; +}; + +// @public +export type DynamicsCrmEntityDataset = Dataset & { + entityName?: any; +}; + +// @public +export type DynamicsCrmLinkedService = LinkedService & { + deploymentType: DynamicsDeploymentType; + hostName?: any; + port?: any; + serviceUri?: any; + organizationName?: any; + authenticationType: DynamicsAuthenticationType; + username?: any; + password?: SecretBaseUnion; + servicePrincipalId?: any; + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + servicePrincipalCredential?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type DynamicsCrmSink = CopySink & { + writeBehavior: DynamicsSinkWriteBehavior; + ignoreNullValues?: any; + alternateKeyName?: any; +}; + +// @public +export type DynamicsCrmSource = CopySource & { + query?: any; +}; + +// @public +export type DynamicsDeploymentType = string; + +// @public +export type DynamicsEntityDataset = Dataset & { + entityName?: any; +}; + +// @public +export type DynamicsLinkedService = LinkedService & { + deploymentType: DynamicsDeploymentType; + hostName?: string; + port?: string; + serviceUri?: string; + organizationName?: string; + authenticationType: DynamicsAuthenticationType; + username?: any; + password?: SecretBaseUnion; + servicePrincipalId?: any; + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + servicePrincipalCredential?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type 
DynamicsServicePrincipalCredentialType = string; + +// @public +export type DynamicsSink = CopySink & { + writeBehavior: DynamicsSinkWriteBehavior; + ignoreNullValues?: any; + alternateKeyName?: any; +}; + +// @public +export type DynamicsSinkWriteBehavior = string; + +// @public +export type DynamicsSource = CopySource & { + query?: any; +}; + +// @public +export type EloquaLinkedService = LinkedService & { + endpoint: any; + username: any; + password?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type EloquaObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type EloquaSource = TabularSource & { + query?: any; +}; + +// @public +export interface EncryptionDetails { + cmk?: CustomerManagedKeyDetails; + readonly doubleEncryptionEnabled?: boolean; +} + +// @public +export interface EntityReference { + referenceName?: string; + type?: IntegrationRuntimeEntityReferenceType; +} + +// @public +export interface ErrorAdditionalInfo { + readonly info?: any; + readonly type?: string; +} + +// @public +export interface ErrorContract { + error?: ErrorResponse; +} + +// @public +export interface ErrorResponse { + readonly additionalInfo?: ErrorAdditionalInfo[]; + readonly code?: string; + readonly details?: ErrorResponse[]; + readonly message?: string; + readonly target?: string; +} + +// @public +export interface EvaluateDataFlowExpressionRequest { + dataFlowName?: string; + expression?: string; + rowLimits?: number; + sessionId?: string; + streamName?: string; +} + +// @public +export type EventSubscriptionStatus = string; + +// @public +export type ExecuteDataFlowActivity = ExecutionActivity & { + dataFlow: DataFlowReference; + staging?: DataFlowStagingInfo; + integrationRuntime?: IntegrationRuntimeReference; + compute?: ExecuteDataFlowActivityTypePropertiesCompute; +}; + +// @public +export interface 
ExecuteDataFlowActivityTypePropertiesCompute { + computeType?: DataFlowComputeType; + coreCount?: number; +} + +// @public +export type ExecutePipelineActivity = Activity & { + pipeline: PipelineReference; + parameters?: { + [propertyName: string]: any; + }; + waitOnCompletion?: boolean; +}; + +// @public +export type ExecuteSsisPackageActivity = ExecutionActivity & { + packageLocation: SsisPackageLocation; + runtime?: any; + loggingLevel?: any; + environmentPath?: any; + executionCredential?: SsisExecutionCredential; + connectVia: IntegrationRuntimeReference; + projectParameters?: { + [propertyName: string]: SsisExecutionParameter; + }; + packageParameters?: { + [propertyName: string]: SsisExecutionParameter; + }; + projectConnectionManagers?: { + [propertyName: string]: any; + }; + packageConnectionManagers?: { + [propertyName: string]: any; + }; + propertyOverrides?: { + [propertyName: string]: SsisPropertyOverride; + }; + logLocation?: SsisLogLocation; +}; + +// @public +export type ExecutionActivity = Activity & { + linkedServiceName?: LinkedServiceReference; + policy?: ActivityPolicy; +}; + +// @public (undocumented) +export type ExecutionActivityUnion = CopyActivity | HDInsightHiveActivity | HDInsightPigActivity | HDInsightMapReduceActivity | HDInsightStreamingActivity | HDInsightSparkActivity | ExecuteSsisPackageActivity | CustomActivity | SqlServerStoredProcedureActivity | DeleteActivity | AzureDataExplorerCommandActivity | LookupActivity | WebActivity | GetMetadataActivity | AzureMLBatchExecutionActivity | AzureMLUpdateResourceActivity | AzureMLExecutePipelineActivity | DataLakeAnalyticsUsqlActivity | DatabricksNotebookActivity | DatabricksSparkJarActivity | DatabricksSparkPythonActivity | AzureFunctionActivity | ExecuteDataFlowActivity; + +// @public +export interface ExposureControlRequest { + featureName?: string; + featureType?: string; +} + +// @public +export interface ExposureControlResponse { + readonly featureName?: string; + readonly value?: 
string; +} + +// @public +export interface Expression { + type: ExpressionType; + value: string; +} + +// @public +export type ExpressionType = string; + +// @public +export type FileServerLinkedService = LinkedService & { + host: any; + userId?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type FileServerLocation = DatasetLocation & {}; + +// @public +export type FileServerReadSettings = StoreReadSettings & { + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type FileServerWriteSettings = StoreWriteSettings & {}; + +// @public +export type FileSystemSink = CopySink & { + copyBehavior?: any; +}; + +// @public +export type FileSystemSource = CopySource & { + recursive?: any; +}; + +// @public +export type FilterActivity = Activity & { + items: Expression; + condition: Expression; +}; + +// @public +export type ForEachActivity = Activity & { + isSequential?: boolean; + batchCount?: number; + items: Expression; + activities: ActivityUnion[]; +}; + +// @public +export interface FormatReadSettings { + [property: string]: any; + type: "DelimitedTextReadSettings"; +} + +// @public (undocumented) +export type FormatReadSettingsUnion = DelimitedTextReadSettings; + +// @public +export interface FormatWriteSettings { + [property: string]: any; + type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; +} + +// @public (undocumented) +export type FormatWriteSettingsUnion = AvroWriteSettings | DelimitedTextWriteSettings | JsonWriteSettings; + +// @public +export type FtpAuthenticationType = string; + +// @public +export type FtpReadSettings = StoreReadSettings & { + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + useBinaryTransfer?: boolean; +}; + +// @public +export type FtpServerLinkedService = LinkedService & { + host: any; + port?: any; + 
authenticationType?: FtpAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; + enableSsl?: any; + enableServerCertificateValidation?: any; +}; + +// @public +export type FtpServerLocation = DatasetLocation & {}; + +// @public +export type GetMetadataActivity = ExecutionActivity & { + dataset: DatasetReference; + fieldList?: any[]; +}; + +// @public +export interface GetSsisObjectMetadataRequest { + metadataPath?: string; +} + +// @public (undocumented) +export interface GitHubAccessTokenRequest { + gitHubAccessCode: string; + gitHubAccessTokenBaseUrl: string; + gitHubClientId: string; +} + +// @public (undocumented) +export interface GitHubAccessTokenResponse { + // (undocumented) + gitHubAccessToken?: string; +} + +// @public +export type GoogleAdWordsAuthenticationType = string; + +// @public +export type GoogleAdWordsLinkedService = LinkedService & { + clientCustomerID: any; + developerToken: SecretBaseUnion; + authenticationType: GoogleAdWordsAuthenticationType; + refreshToken?: SecretBaseUnion; + clientId?: any; + clientSecret?: SecretBaseUnion; + email?: any; + keyFilePath?: any; + trustedCertPath?: any; + useSystemTrustStore?: any; + encryptedCredential?: any; +}; + +// @public +export type GoogleAdWordsObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type GoogleAdWordsSource = TabularSource & { + query?: any; +}; + +// @public +export type GoogleBigQueryAuthenticationType = string; + +// @public +export type GoogleBigQueryLinkedService = LinkedService & { + project: any; + additionalProjects?: any; + requestGoogleDriveScope?: any; + authenticationType: GoogleBigQueryAuthenticationType; + refreshToken?: SecretBaseUnion; + clientId?: any; + clientSecret?: SecretBaseUnion; + email?: any; + keyFilePath?: any; + trustedCertPath?: any; + useSystemTrustStore?: any; + encryptedCredential?: any; +}; + +// @public +export type GoogleBigQueryObjectDataset = Dataset & { + tableName?: any; + table?: 
any; + dataset?: any; +}; + +// @public +export type GoogleBigQuerySource = TabularSource & { + query?: any; +}; + +// @public +export type GoogleCloudStorageLinkedService = LinkedService & { + accessKeyId?: any; + secretAccessKey?: SecretBaseUnion; + serviceUrl?: any; + encryptedCredential?: any; +}; + +// @public +export type GoogleCloudStorageLocation = DatasetLocation & { + bucketName?: any; + version?: any; +}; + +// @public +export type GoogleCloudStorageReadSettings = StoreReadSettings & { + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + prefix?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type GreenplumLinkedService = LinkedService & { + connectionString?: any; + pwd?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type GreenplumSource = TabularSource & { + query?: any; +}; + +// @public +export type GreenplumTableDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type HBaseAuthenticationType = string; + +// @public +export type HBaseLinkedService = LinkedService & { + host: any; + port?: any; + httpPath?: any; + authenticationType: HBaseAuthenticationType; + username?: any; + password?: SecretBaseUnion; + enableSsl?: any; + trustedCertPath?: any; + allowHostNameCNMismatch?: any; + allowSelfSignedServerCert?: any; + encryptedCredential?: any; +}; + +// @public +export type HBaseObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type HBaseSource = TabularSource & { + query?: any; +}; + +// @public +export type HdfsLinkedService = LinkedService & { + url: any; + authenticationType?: any; + encryptedCredential?: any; + userName?: any; + password?: SecretBaseUnion; +}; + +// @public +export type HdfsLocation = DatasetLocation & {}; + +// @public +export type HdfsReadSettings = StoreReadSettings & { + recursive?: any; + 
wildcardFolderPath?: any; + wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; + distcpSettings?: DistcpSettings; +}; + +// @public +export type HdfsSource = CopySource & { + recursive?: any; + distcpSettings?: DistcpSettings; +}; + +// @public +export type HdiNodeTypes = string; + +// @public +export type HDInsightActivityDebugInfoOption = string; + +// @public +export type HDInsightHiveActivity = ExecutionActivity & { + storageLinkedServices?: LinkedServiceReference[]; + arguments?: any[]; + getDebugInfo?: HDInsightActivityDebugInfoOption; + scriptPath?: any; + scriptLinkedService?: LinkedServiceReference; + defines?: { + [propertyName: string]: any; + }; + variables?: any[]; + queryTimeout?: number; +}; + +// @public +export type HDInsightLinkedService = LinkedService & { + clusterUri: any; + userName?: any; + password?: SecretBaseUnion; + linkedServiceName?: LinkedServiceReference; + hcatalogLinkedServiceName?: LinkedServiceReference; + encryptedCredential?: any; + isEspEnabled?: any; + fileSystem?: any; +}; + +// @public +export type HDInsightMapReduceActivity = ExecutionActivity & { + storageLinkedServices?: LinkedServiceReference[]; + arguments?: any[]; + getDebugInfo?: HDInsightActivityDebugInfoOption; + className: any; + jarFilePath: any; + jarLinkedService?: LinkedServiceReference; + jarLibs?: any[]; + defines?: { + [propertyName: string]: any; + }; +}; + +// @public +export type HDInsightOnDemandLinkedService = LinkedService & { + clusterSize: any; + timeToLive: any; + version: any; + linkedServiceName: LinkedServiceReference; + hostSubscriptionId: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant: any; + clusterResourceGroup: any; + clusterNamePrefix?: any; + clusterUserName?: any; + clusterPassword?: SecretBaseUnion; + clusterSshUserName?: any; + clusterSshPassword?: SecretBaseUnion; + additionalLinkedServiceNames?: LinkedServiceReference[]; + 
hcatalogLinkedServiceName?: LinkedServiceReference; + clusterType?: any; + sparkVersion?: any; + coreConfiguration?: any; + hBaseConfiguration?: any; + hdfsConfiguration?: any; + hiveConfiguration?: any; + mapReduceConfiguration?: any; + oozieConfiguration?: any; + stormConfiguration?: any; + yarnConfiguration?: any; + encryptedCredential?: any; + headNodeSize?: any; + dataNodeSize?: any; + zookeeperNodeSize?: any; + scriptActions?: ScriptAction[]; + virtualNetworkId?: any; + subnetName?: any; +}; + +// @public +export type HDInsightPigActivity = ExecutionActivity & { + storageLinkedServices?: LinkedServiceReference[]; + arguments?: any; + getDebugInfo?: HDInsightActivityDebugInfoOption; + scriptPath?: any; + scriptLinkedService?: LinkedServiceReference; + defines?: { + [propertyName: string]: any; + }; +}; + +// @public +export type HDInsightSparkActivity = ExecutionActivity & { + rootPath: any; + entryFilePath: any; + arguments?: any[]; + getDebugInfo?: HDInsightActivityDebugInfoOption; + sparkJobLinkedService?: LinkedServiceReference; + className?: string; + proxyUser?: any; + sparkConfig?: { + [propertyName: string]: any; + }; +}; + +// @public +export type HDInsightStreamingActivity = ExecutionActivity & { + storageLinkedServices?: LinkedServiceReference[]; + arguments?: any[]; + getDebugInfo?: HDInsightActivityDebugInfoOption; + mapper: any; + reducer: any; + input: any; + output: any; + filePaths: any[]; + fileLinkedService?: LinkedServiceReference; + combiner?: any; + commandEnvironment?: any[]; + defines?: { + [propertyName: string]: any; + }; +}; + +// @public +export type HiveAuthenticationType = string; + +// @public +export type HiveLinkedService = LinkedService & { + host: any; + port?: any; + serverType?: HiveServerType; + thriftTransportProtocol?: HiveThriftTransportProtocol; + authenticationType: HiveAuthenticationType; + serviceDiscoveryMode?: any; + zooKeeperNameSpace?: any; + useNativeQuery?: any; + username?: any; + password?: SecretBaseUnion; 
+ httpPath?: any; + enableSsl?: any; + trustedCertPath?: any; + useSystemTrustStore?: any; + allowHostNameCNMismatch?: any; + allowSelfSignedServerCert?: any; + encryptedCredential?: any; +}; + +// @public +export type HiveObjectDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type HiveServerType = string; + +// @public +export type HiveSource = TabularSource & { + query?: any; +}; + +// @public +export type HiveThriftTransportProtocol = string; + +// @public +export type HttpAuthenticationType = string; + +// @public +export type HttpLinkedService = LinkedService & { + url: any; + authenticationType?: HttpAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + embeddedCertData?: any; + certThumbprint?: any; + encryptedCredential?: any; + enableServerCertificateValidation?: any; +}; + +// @public +export type HttpReadSettings = StoreReadSettings & { + requestMethod?: any; + requestBody?: any; + additionalHeaders?: any; + requestTimeout?: any; +}; + +// @public +export type HttpServerLocation = DatasetLocation & { + relativeUrl?: any; +}; + +// @public +export type HttpSource = CopySource & { + httpRequestTimeout?: any; +}; + +// @public +export type HubspotLinkedService = LinkedService & { + clientId: any; + clientSecret?: SecretBaseUnion; + accessToken?: SecretBaseUnion; + refreshToken?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type HubspotObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type HubspotSource = TabularSource & { + query?: any; +}; + +// @public +export type IfConditionActivity = Activity & { + expression: Expression; + ifTrueActivities?: ActivityUnion[]; + ifFalseActivities?: ActivityUnion[]; +}; + +// @public +export type ImpalaAuthenticationType = string; + +// @public +export type ImpalaLinkedService = LinkedService & { + 
host: any; + port?: any; + authenticationType: ImpalaAuthenticationType; + username?: any; + password?: SecretBaseUnion; + enableSsl?: any; + trustedCertPath?: any; + useSystemTrustStore?: any; + allowHostNameCNMismatch?: any; + allowSelfSignedServerCert?: any; + encryptedCredential?: any; +}; + +// @public +export type ImpalaObjectDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type ImpalaSource = TabularSource & { + query?: any; +}; + +// @public +export type InformixLinkedService = LinkedService & { + connectionString: any; + authenticationType?: any; + credential?: SecretBaseUnion; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type InformixSink = CopySink & { + preCopyScript?: any; +}; + +// @public +export type InformixSource = TabularSource & { + query?: any; +}; + +// @public +export type InformixTableDataset = Dataset & { + tableName?: any; +}; + +// @public +export interface IntegrationRuntime { + [property: string]: any; + description?: string; + type: "Managed" | "SelfHosted"; +} + +// @public +export interface IntegrationRuntimeComputeProperties { + [property: string]: any; + dataFlowProperties?: IntegrationRuntimeDataFlowProperties; + location?: string; + maxParallelExecutionsPerNode?: number; + nodeSize?: string; + numberOfNodes?: number; + vNetProperties?: IntegrationRuntimeVNetProperties; +} + +// @public +export interface IntegrationRuntimeCustomSetupScriptProperties { + blobContainerUri?: string; + sasToken?: SecureString; +} + +// @public +export interface IntegrationRuntimeDataFlowProperties { + [property: string]: any; + computeType?: DataFlowComputeType; + coreCount?: number; + timeToLive?: number; +} + +// @public +export interface IntegrationRuntimeDataProxyProperties { + connectVia?: EntityReference; + path?: string; + stagingLinkedService?: EntityReference; +} + +// @public +export type IntegrationRuntimeEdition = 
string; + +// @public +export type IntegrationRuntimeEntityReferenceType = string; + +// @public +export type IntegrationRuntimeLicenseType = string; + +// @public +export interface IntegrationRuntimeListResponse { + nextLink?: string; + value: IntegrationRuntimeResource[]; +} + +// @public +export interface IntegrationRuntimeReference { + parameters?: { + [propertyName: string]: any; + }; + referenceName: string; + type: IntegrationRuntimeReferenceType; +} + +// @public +export type IntegrationRuntimeReferenceType = string; + +// @public +export type IntegrationRuntimeResource = AzureEntityResource & { + properties: IntegrationRuntimeUnion; +}; + +// @public +export type IntegrationRuntimesGetResponse = IntegrationRuntimeResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: IntegrationRuntimeResource; + }; +}; + +// @public +export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: IntegrationRuntimeListResponse; + }; +}; + +// @public +export interface IntegrationRuntimeSsisCatalogInfo { + [property: string]: any; + catalogAdminPassword?: SecureString; + catalogAdminUserName?: string; + catalogPricingTier?: IntegrationRuntimeSsisCatalogPricingTier; + catalogServerEndpoint?: string; +} + +// @public +export type IntegrationRuntimeSsisCatalogPricingTier = string; + +// @public +export interface IntegrationRuntimeSsisProperties { + [property: string]: any; + catalogInfo?: IntegrationRuntimeSsisCatalogInfo; + customSetupScriptProperties?: IntegrationRuntimeCustomSetupScriptProperties; + dataProxyProperties?: IntegrationRuntimeDataProxyProperties; + edition?: IntegrationRuntimeEdition; + expressCustomSetupProperties?: CustomSetupBase[]; + licenseType?: IntegrationRuntimeLicenseType; +} + +// @public +export type IntegrationRuntimeState = string; + +// @public +export type IntegrationRuntimeType = string; + +// @public (undocumented) 
+export type IntegrationRuntimeUnion = ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; + +// @public +export interface IntegrationRuntimeVNetProperties { + [property: string]: any; + publicIPs?: string[]; + subnet?: string; + vNetId?: string; +} + +// @public +export type JiraLinkedService = LinkedService & { + host: any; + port?: any; + username: any; + password?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type JiraObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type JiraSource = TabularSource & { + query?: any; +}; + +// @public +export type JsonDataset = Dataset & { + location?: DatasetLocationUnion; + encodingName?: any; + compression?: DatasetCompressionUnion; +}; + +// @public +export type JsonFormat = DatasetStorageFormat & { + filePattern?: JsonFormatFilePattern; + nestingSeparator?: any; + encodingName?: any; + jsonNodeReference?: any; + jsonPathDefinition?: any; +}; + +// @public +export type JsonFormatFilePattern = string; + +// @public +export type JsonSink = CopySink & { + storeSettings?: StoreWriteSettingsUnion; + formatSettings?: JsonWriteSettings; +}; + +// @public +export type JsonSource = CopySource & { + storeSettings?: StoreReadSettingsUnion; +}; + +// @public +export type JsonWriteFilePattern = string; + +// @public +export type JsonWriteSettings = FormatWriteSettings & { + filePattern?: JsonWriteFilePattern; +}; + +// @public +export const enum KnownAvroCompressionCodec { + // (undocumented) + Bzip2 = "bzip2", + // (undocumented) + Deflate = "deflate", + // (undocumented) + None = "none", + // (undocumented) + Snappy = "snappy", + // (undocumented) + Xz = "xz" +} + +// @public +export const enum KnownAzureFunctionActivityMethod { + // (undocumented) + Delete = "DELETE", + // (undocumented) + GET = "GET", + // (undocumented) + Head = "HEAD", + // (undocumented) + Options = "OPTIONS", + // 
(undocumented) + Post = "POST", + // (undocumented) + PUT = "PUT", + // (undocumented) + Trace = "TRACE" +} + +// @public +export const enum KnownAzureSearchIndexWriteBehaviorType { + // (undocumented) + Merge = "Merge", + // (undocumented) + Upload = "Upload" +} + +// @public +export const enum KnownBigDataPoolReferenceType { + // (undocumented) + BigDataPoolReference = "BigDataPoolReference" +} + +// @public +export const enum KnownBlobEventTypes { + // (undocumented) + MicrosoftStorageBlobCreated = "Microsoft.Storage.BlobCreated", + // (undocumented) + MicrosoftStorageBlobDeleted = "Microsoft.Storage.BlobDeleted" +} + +// @public +export const enum KnownCassandraSourceReadConsistencyLevels { + // (undocumented) + ALL = "ALL", + // (undocumented) + EachQuorum = "EACH_QUORUM", + // (undocumented) + LocalONE = "LOCAL_ONE", + // (undocumented) + LocalQuorum = "LOCAL_QUORUM", + // (undocumented) + LocalSerial = "LOCAL_SERIAL", + // (undocumented) + ONE = "ONE", + // (undocumented) + Quorum = "QUORUM", + // (undocumented) + Serial = "SERIAL", + // (undocumented) + Three = "THREE", + // (undocumented) + TWO = "TWO" +} + +// @public +export const enum KnownCellOutputType { + // (undocumented) + DisplayData = "display_data", + // (undocumented) + Error = "error", + // (undocumented) + ExecuteResult = "execute_result", + // (undocumented) + Stream = "stream" +} + +// @public +export const enum KnownCopyBehaviorType { + // (undocumented) + FlattenHierarchy = "FlattenHierarchy", + // (undocumented) + MergeFiles = "MergeFiles", + // (undocumented) + PreserveHierarchy = "PreserveHierarchy" +} + +// @public +export const enum KnownDataFlowComputeType { + // (undocumented) + ComputeOptimized = "ComputeOptimized", + // (undocumented) + General = "General", + // (undocumented) + MemoryOptimized = "MemoryOptimized" +} + +// @public +export const enum KnownDataFlowReferenceType { + // (undocumented) + DataFlowReference = "DataFlowReference" +} + +// @public +export const enum 
KnownDatasetCompressionLevel { + // (undocumented) + Fastest = "Fastest", + // (undocumented) + Optimal = "Optimal" +} + +// @public +export const enum KnownDatasetReferenceType { + // (undocumented) + DatasetReference = "DatasetReference" +} + +// @public +export const enum KnownDb2AuthenticationType { + // (undocumented) + Basic = "Basic" +} + +// @public +export const enum KnownDelimitedTextCompressionCodec { + // (undocumented) + Bzip2 = "bzip2", + // (undocumented) + Deflate = "deflate", + // (undocumented) + Gzip = "gzip", + // (undocumented) + Lz4 = "lz4", + // (undocumented) + Snappy = "snappy", + // (undocumented) + ZipDeflate = "zipDeflate" +} + +// @public +export const enum KnownDependencyCondition { + // (undocumented) + Completed = "Completed", + // (undocumented) + Failed = "Failed", + // (undocumented) + Skipped = "Skipped", + // (undocumented) + Succeeded = "Succeeded" +} + +// @public +export const enum KnownDynamicsAuthenticationType { + // (undocumented) + AADServicePrincipal = "AADServicePrincipal", + // (undocumented) + Ifd = "Ifd", + // (undocumented) + Office365 = "Office365" +} + +// @public +export const enum KnownDynamicsDeploymentType { + // (undocumented) + Online = "Online", + // (undocumented) + OnPremisesWithIfd = "OnPremisesWithIfd" +} + +// @public +export const enum KnownDynamicsServicePrincipalCredentialType { + // (undocumented) + ServicePrincipalCert = "ServicePrincipalCert", + // (undocumented) + ServicePrincipalKey = "ServicePrincipalKey" +} + +// @public +export const enum KnownDynamicsSinkWriteBehavior { + // (undocumented) + Upsert = "Upsert" +} + +// @public +export const enum KnownEventSubscriptionStatus { + // (undocumented) + Deprovisioning = "Deprovisioning", + // (undocumented) + Disabled = "Disabled", + // (undocumented) + Enabled = "Enabled", + // (undocumented) + Provisioning = "Provisioning", + // (undocumented) + Unknown = "Unknown" +} + +// @public +export const enum KnownExpressionType { + // (undocumented) + 
Expression = "Expression" +} + +// @public +export const enum KnownFtpAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + Basic = "Basic" +} + +// @public +export const enum KnownGoogleAdWordsAuthenticationType { + // (undocumented) + ServiceAuthentication = "ServiceAuthentication", + // (undocumented) + UserAuthentication = "UserAuthentication" +} + +// @public +export const enum KnownGoogleBigQueryAuthenticationType { + // (undocumented) + ServiceAuthentication = "ServiceAuthentication", + // (undocumented) + UserAuthentication = "UserAuthentication" +} + +// @public +export const enum KnownHBaseAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + Basic = "Basic" +} + +// @public +export const enum KnownHdiNodeTypes { + // (undocumented) + Headnode = "Headnode", + // (undocumented) + Workernode = "Workernode", + // (undocumented) + Zookeeper = "Zookeeper" +} + +// @public +export const enum KnownHDInsightActivityDebugInfoOption { + // (undocumented) + Always = "Always", + // (undocumented) + Failure = "Failure", + // (undocumented) + None = "None" +} + +// @public +export const enum KnownHiveAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + Username = "Username", + // (undocumented) + UsernameAndPassword = "UsernameAndPassword", + // (undocumented) + WindowsAzureHDInsightService = "WindowsAzureHDInsightService" +} + +// @public +export const enum KnownHiveServerType { + // (undocumented) + HiveServer1 = "HiveServer1", + // (undocumented) + HiveServer2 = "HiveServer2", + // (undocumented) + HiveThriftServer = "HiveThriftServer" +} + +// @public +export const enum KnownHiveThriftTransportProtocol { + // (undocumented) + Binary = "Binary", + // (undocumented) + Http = "HTTP ", + // (undocumented) + Sasl = "SASL" +} + +// @public +export const enum KnownHttpAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + Basic = 
"Basic", + // (undocumented) + ClientCertificate = "ClientCertificate", + // (undocumented) + Digest = "Digest", + // (undocumented) + Windows = "Windows" +} + +// @public +export const enum KnownImpalaAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + SaslUsername = "SASLUsername", + // (undocumented) + UsernameAndPassword = "UsernameAndPassword" +} + +// @public +export const enum KnownIntegrationRuntimeEdition { + // (undocumented) + Enterprise = "Enterprise", + // (undocumented) + Standard = "Standard" +} + +// @public +export const enum KnownIntegrationRuntimeEntityReferenceType { + // (undocumented) + IntegrationRuntimeReference = "IntegrationRuntimeReference", + // (undocumented) + LinkedServiceReference = "LinkedServiceReference" +} + +// @public +export const enum KnownIntegrationRuntimeLicenseType { + // (undocumented) + BasePrice = "BasePrice", + // (undocumented) + LicenseIncluded = "LicenseIncluded" +} + +// @public +export const enum KnownIntegrationRuntimeReferenceType { + // (undocumented) + IntegrationRuntimeReference = "IntegrationRuntimeReference" +} + +// @public +export const enum KnownIntegrationRuntimeSsisCatalogPricingTier { + // (undocumented) + Basic = "Basic", + // (undocumented) + Premium = "Premium", + // (undocumented) + PremiumRS = "PremiumRS", + // (undocumented) + Standard = "Standard" +} + +// @public +export const enum KnownIntegrationRuntimeState { + // (undocumented) + AccessDenied = "AccessDenied", + // (undocumented) + Initial = "Initial", + // (undocumented) + Limited = "Limited", + // (undocumented) + NeedRegistration = "NeedRegistration", + // (undocumented) + Offline = "Offline", + // (undocumented) + Online = "Online", + // (undocumented) + Started = "Started", + // (undocumented) + Starting = "Starting", + // (undocumented) + Stopped = "Stopped", + // (undocumented) + Stopping = "Stopping" +} + +// @public +export const enum KnownIntegrationRuntimeType { + // (undocumented) + 
Managed = "Managed", + // (undocumented) + SelfHosted = "SelfHosted" +} + +// @public +export const enum KnownJsonFormatFilePattern { + // (undocumented) + ArrayOfObjects = "arrayOfObjects", + // (undocumented) + SetOfObjects = "setOfObjects" +} + +// @public +export const enum KnownJsonWriteFilePattern { + // (undocumented) + ArrayOfObjects = "arrayOfObjects", + // (undocumented) + SetOfObjects = "setOfObjects" +} + +// @public +export const enum KnownMongoDbAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + Basic = "Basic" +} + +// @public +export const enum KnownNetezzaPartitionOption { + // (undocumented) + DataSlice = "DataSlice", + // (undocumented) + DynamicRange = "DynamicRange", + // (undocumented) + None = "None" +} + +// @public +export const enum KnownNodeSize { + // (undocumented) + Large = "Large", + // (undocumented) + Medium = "Medium", + // (undocumented) + None = "None", + // (undocumented) + Small = "Small", + // (undocumented) + XLarge = "XLarge", + // (undocumented) + XXLarge = "XXLarge", + // (undocumented) + XXXLarge = "XXXLarge" +} + +// @public +export const enum KnownNodeSizeFamily { + // (undocumented) + MemoryOptimized = "MemoryOptimized", + // (undocumented) + None = "None" +} + +// @public +export const enum KnownNotebookReferenceType { + // (undocumented) + NotebookReference = "NotebookReference" +} + +// @public +export const enum KnownODataAadServicePrincipalCredentialType { + // (undocumented) + ServicePrincipalCert = "ServicePrincipalCert", + // (undocumented) + ServicePrincipalKey = "ServicePrincipalKey" +} + +// @public +export const enum KnownODataAuthenticationType { + // (undocumented) + AadServicePrincipal = "AadServicePrincipal", + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + Basic = "Basic", + // (undocumented) + ManagedServiceIdentity = "ManagedServiceIdentity", + // (undocumented) + Windows = "Windows" +} + +// @public +export const enum 
KnownOraclePartitionOption { + // (undocumented) + DynamicRange = "DynamicRange", + // (undocumented) + None = "None", + // (undocumented) + PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable" +} + +// @public +export const enum KnownOrcCompressionCodec { + // (undocumented) + None = "none", + // (undocumented) + Snappy = "snappy", + // (undocumented) + Zlib = "zlib" +} + +// @public +export const enum KnownParameterType { + // (undocumented) + Array = "Array", + // (undocumented) + Bool = "Bool", + // (undocumented) + Float = "Float", + // (undocumented) + Int = "Int", + // (undocumented) + Object = "Object", + // (undocumented) + SecureString = "SecureString", + // (undocumented) + String = "String" +} + +// @public +export const enum KnownParquetCompressionCodec { + // (undocumented) + Gzip = "gzip", + // (undocumented) + Lzo = "lzo", + // (undocumented) + None = "none", + // (undocumented) + Snappy = "snappy" +} + +// @public +export const enum KnownPhoenixAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + UsernameAndPassword = "UsernameAndPassword", + // (undocumented) + WindowsAzureHDInsightService = "WindowsAzureHDInsightService" +} + +// @public +export const enum KnownPipelineReferenceType { + // (undocumented) + PipelineReference = "PipelineReference" +} + +// @public +export const enum KnownPluginCurrentState { + // (undocumented) + Cleanup = "Cleanup", + // (undocumented) + Ended = "Ended", + // (undocumented) + Monitoring = "Monitoring", + // (undocumented) + Preparation = "Preparation", + // (undocumented) + Queued = "Queued", + // (undocumented) + ResourceAcquisition = "ResourceAcquisition", + // (undocumented) + Submission = "Submission" +} + +// @public +export const enum KnownPolybaseSettingsRejectType { + // (undocumented) + Percentage = "percentage", + // (undocumented) + Value = "value" +} + +// @public +export const enum KnownPrestoAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + 
// (undocumented) + Ldap = "LDAP" +} + +// @public +export const enum KnownRecurrenceFrequency { + // (undocumented) + Day = "Day", + // (undocumented) + Hour = "Hour", + // (undocumented) + Minute = "Minute", + // (undocumented) + Month = "Month", + // (undocumented) + NotSpecified = "NotSpecified", + // (undocumented) + Week = "Week", + // (undocumented) + Year = "Year" +} + +// @public +export const enum KnownRestServiceAuthenticationType { + // (undocumented) + AadServicePrincipal = "AadServicePrincipal", + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + Basic = "Basic", + // (undocumented) + ManagedServiceIdentity = "ManagedServiceIdentity" +} + +// @public +export const enum KnownRunQueryFilterOperand { + // (undocumented) + ActivityName = "ActivityName", + // (undocumented) + ActivityRunEnd = "ActivityRunEnd", + // (undocumented) + ActivityRunStart = "ActivityRunStart", + // (undocumented) + ActivityType = "ActivityType", + // (undocumented) + LatestOnly = "LatestOnly", + // (undocumented) + PipelineName = "PipelineName", + // (undocumented) + RunEnd = "RunEnd", + // (undocumented) + RunGroupId = "RunGroupId", + // (undocumented) + RunStart = "RunStart", + // (undocumented) + Status = "Status", + // (undocumented) + TriggerName = "TriggerName", + // (undocumented) + TriggerRunTimestamp = "TriggerRunTimestamp" +} + +// @public +export const enum KnownRunQueryFilterOperator { + // (undocumented) + Equals = "Equals", + // (undocumented) + In = "In", + // (undocumented) + NotEquals = "NotEquals", + // (undocumented) + NotIn = "NotIn" +} + +// @public +export const enum KnownRunQueryOrder { + // (undocumented) + ASC = "ASC", + // (undocumented) + Desc = "DESC" +} + +// @public +export const enum KnownRunQueryOrderByField { + // (undocumented) + ActivityName = "ActivityName", + // (undocumented) + ActivityRunEnd = "ActivityRunEnd", + // (undocumented) + ActivityRunStart = "ActivityRunStart", + // (undocumented) + PipelineName = "PipelineName", + 
// (undocumented) + RunEnd = "RunEnd", + // (undocumented) + RunStart = "RunStart", + // (undocumented) + Status = "Status", + // (undocumented) + TriggerName = "TriggerName", + // (undocumented) + TriggerRunTimestamp = "TriggerRunTimestamp" +} + +// @public +export const enum KnownSalesforceSinkWriteBehavior { + // (undocumented) + Insert = "Insert", + // (undocumented) + Upsert = "Upsert" +} + +// @public +export const enum KnownSalesforceSourceReadBehavior { + // (undocumented) + Query = "Query", + // (undocumented) + QueryAll = "QueryAll" +} + +// @public +export const enum KnownSapCloudForCustomerSinkWriteBehavior { + // (undocumented) + Insert = "Insert", + // (undocumented) + Update = "Update" +} + +// @public +export const enum KnownSapHanaAuthenticationType { + // (undocumented) + Basic = "Basic", + // (undocumented) + Windows = "Windows" +} + +// @public +export const enum KnownSapHanaPartitionOption { + // (undocumented) + None = "None", + // (undocumented) + PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable", + // (undocumented) + SapHanaDynamicRange = "SapHanaDynamicRange" +} + +// @public +export const enum KnownSapTablePartitionOption { + // (undocumented) + None = "None", + // (undocumented) + PartitionOnCalendarDate = "PartitionOnCalendarDate", + // (undocumented) + PartitionOnCalendarMonth = "PartitionOnCalendarMonth", + // (undocumented) + PartitionOnCalendarYear = "PartitionOnCalendarYear", + // (undocumented) + PartitionOnInt = "PartitionOnInt", + // (undocumented) + PartitionOnTime = "PartitionOnTime" +} + +// @public +export const enum KnownSchedulerCurrentState { + // (undocumented) + Ended = "Ended", + // (undocumented) + Queued = "Queued", + // (undocumented) + Scheduled = "Scheduled" +} + +// @public +export const enum KnownServiceNowAuthenticationType { + // (undocumented) + Basic = "Basic", + // (undocumented) + OAuth2 = "OAuth2" +} + +// @public +export const enum KnownSftpAuthenticationType { + // (undocumented) + Basic = 
"Basic", + // (undocumented) + SshPublicKey = "SshPublicKey" +} + +// @public +export const enum KnownSparkAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + Username = "Username", + // (undocumented) + UsernameAndPassword = "UsernameAndPassword", + // (undocumented) + WindowsAzureHDInsightService = "WindowsAzureHDInsightService" +} + +// @public +export const enum KnownSparkBatchJobResultType { + // (undocumented) + Cancelled = "Cancelled", + // (undocumented) + Failed = "Failed", + // (undocumented) + Succeeded = "Succeeded", + // (undocumented) + Uncertain = "Uncertain" +} + +// @public +export const enum KnownSparkErrorSource { + // (undocumented) + Dependency = "Dependency", + // (undocumented) + System = "System", + // (undocumented) + Unknown = "Unknown", + // (undocumented) + User = "User" +} + +// @public +export const enum KnownSparkJobReferenceType { + // (undocumented) + SparkJobDefinitionReference = "SparkJobDefinitionReference" +} + +// @public +export const enum KnownSparkJobType { + // (undocumented) + SparkBatch = "SparkBatch", + // (undocumented) + SparkSession = "SparkSession" +} + +// @public +export const enum KnownSparkServerType { + // (undocumented) + SharkServer = "SharkServer", + // (undocumented) + SharkServer2 = "SharkServer2", + // (undocumented) + SparkThriftServer = "SparkThriftServer" +} + +// @public +export const enum KnownSparkThriftTransportProtocol { + // (undocumented) + Binary = "Binary", + // (undocumented) + Http = "HTTP ", + // (undocumented) + Sasl = "SASL" +} + +// @public +export const enum KnownSqlConnectionType { + // (undocumented) + SqlOnDemand = "SqlOnDemand", + // (undocumented) + SqlPool = "SqlPool" +} + +// @public +export const enum KnownSqlPoolReferenceType { + // (undocumented) + SqlPoolReference = "SqlPoolReference" +} + +// @public +export const enum KnownSqlScriptType { + // (undocumented) + SqlQuery = "SqlQuery" +} + +// @public +export const enum 
KnownSsisLogLocationType { + // (undocumented) + File = "File" +} + +// @public +export const enum KnownSsisPackageLocationType { + // (undocumented) + File = "File", + // (undocumented) + InlinePackage = "InlinePackage", + // (undocumented) + Ssisdb = "SSISDB" +} + +// @public +export const enum KnownStoredProcedureParameterType { + // (undocumented) + Boolean = "Boolean", + // (undocumented) + Date = "Date", + // (undocumented) + Decimal = "Decimal", + // (undocumented) + Guid = "Guid", + // (undocumented) + Int = "Int", + // (undocumented) + Int64 = "Int64", + // (undocumented) + String = "String" +} + +// @public +export const enum KnownSybaseAuthenticationType { + // (undocumented) + Basic = "Basic", + // (undocumented) + Windows = "Windows" +} + +// @public +export const enum KnownTeradataAuthenticationType { + // (undocumented) + Basic = "Basic", + // (undocumented) + Windows = "Windows" +} + +// @public +export const enum KnownTeradataPartitionOption { + // (undocumented) + DynamicRange = "DynamicRange", + // (undocumented) + Hash = "Hash", + // (undocumented) + None = "None" +} + +// @public +export const enum KnownTriggerReferenceType { + // (undocumented) + TriggerReference = "TriggerReference" +} + +// @public +export const enum KnownTriggerRunStatus { + // (undocumented) + Failed = "Failed", + // (undocumented) + Inprogress = "Inprogress", + // (undocumented) + Succeeded = "Succeeded" +} + +// @public +export const enum KnownTriggerRuntimeState { + // (undocumented) + Disabled = "Disabled", + // (undocumented) + Started = "Started", + // (undocumented) + Stopped = "Stopped" +} + +// @public +export const enum KnownTumblingWindowFrequency { + // (undocumented) + Hour = "Hour", + // (undocumented) + Minute = "Minute" +} + +// @public +export const enum KnownType { + // (undocumented) + LinkedServiceReference = "LinkedServiceReference" +} + +// @public +export const enum KnownVariableType { + // (undocumented) + Array = "Array", + // (undocumented) + Bool 
= "Bool", + // (undocumented) + Boolean = "Boolean", + // (undocumented) + String = "String" +} + +// @public +export const enum KnownWebActivityMethod { + // (undocumented) + Delete = "DELETE", + // (undocumented) + GET = "GET", + // (undocumented) + Post = "POST", + // (undocumented) + PUT = "PUT" +} + +// @public +export const enum KnownWebAuthenticationType { + // (undocumented) + Anonymous = "Anonymous", + // (undocumented) + Basic = "Basic", + // (undocumented) + ClientCertificate = "ClientCertificate" +} + +// @public +export const enum KnownWebHookActivityMethod { + // (undocumented) + Post = "POST" +} + +// @public +export interface LibraryRequirements { + content?: string; + filename?: string; + readonly time?: Date; +} + +// @public +export type LinkedIntegrationRuntimeKeyAuthorization = LinkedIntegrationRuntimeType & { + key: SecureString; +}; + +// @public +export type LinkedIntegrationRuntimeRbacAuthorization = LinkedIntegrationRuntimeType & { + resourceId: string; +}; + +// @public +export interface LinkedIntegrationRuntimeType { + authorizationType: "Key" | "RBAC"; +} + +// @public (undocumented) +export type LinkedIntegrationRuntimeTypeUnion = LinkedIntegrationRuntimeKeyAuthorization | LinkedIntegrationRuntimeRbacAuthorization; + +// @public +export interface LinkedService { + [property: string]: any; + annotations?: any[]; + connectVia?: IntegrationRuntimeReference; + description?: string; + parameters?: { + [propertyName: string]: ParameterSpecification; + }; + type: "AzureStorage" | "AzureBlobStorage" | "AzureTableStorage" | "AzureSqlDW" | "SqlServer" | "AzureSqlDatabase" | "AzureSqlMI" | "AzureBatch" | "AzureKeyVault" | "CosmosDb" | "Dynamics" | "DynamicsCrm" | "CommonDataServiceForApps" | "HDInsight" | "FileServer" | "AzureFileStorage" | "GoogleCloudStorage" | "Oracle" | "AzureMySql" | "MySql" | "PostgreSql" | "Sybase" | "Db2" | "Teradata" | "AzureML" | "AzureMLService" | "Odbc" | "Informix" | "MicrosoftAccess" | "Hdfs" | "OData" | "Web" | 
"Cassandra" | "MongoDb" | "MongoDbV2" | "CosmosDbMongoDbApi" | "AzureDataLakeStore" | "AzureBlobFS" | "Office365" | "Salesforce" | "SalesforceServiceCloud" | "SapCloudForCustomer" | "SapEcc" | "SapOpenHub" | "RestService" | "AmazonS3" | "AmazonRedshift" | "CustomDataSource" | "AzureSearch" | "HttpServer" | "FtpServer" | "Sftp" | "SapBW" | "SapHana" | "AmazonMWS" | "AzurePostgreSql" | "Concur" | "Couchbase" | "Drill" | "Eloqua" | "GoogleBigQuery" | "Greenplum" | "HBase" | "Hive" | "Hubspot" | "Impala" | "Jira" | "Magento" | "MariaDB" | "AzureMariaDB" | "Marketo" | "Paypal" | "Phoenix" | "Presto" | "QuickBooks" | "ServiceNow" | "Shopify" | "Spark" | "Square" | "Xero" | "Zoho" | "Vertica" | "Netezza" | "SalesforceMarketingCloud" | "HDInsightOnDemand" | "AzureDataLakeAnalytics" | "AzureDatabricks" | "Responsys" | "DynamicsAX" | "OracleServiceCloud" | "GoogleAdWords" | "SapTable" | "AzureDataExplorer" | "AzureFunction"; +} + +// @public +export interface LinkedServiceCreateOrUpdateLinkedServiceOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: LinkedServiceResource; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export type LinkedServiceDebugResource = SubResourceDebugResource & { + properties: LinkedServiceUnion; +}; + +// @public +export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type LinkedServiceGetLinkedServiceResponse = LinkedServiceResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: LinkedServiceResource; + }; +}; + +// @public +export type LinkedServiceGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: LinkedServiceListResponse; + }; 
+}; + +// @public +export type LinkedServiceGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: LinkedServiceListResponse; + }; +}; + +// @public +export interface LinkedServiceListResponse { + nextLink?: string; + value: LinkedServiceResource[]; +} + +// @public +export interface LinkedServiceReference { + parameters?: { + [propertyName: string]: any; + }; + referenceName: string; + type: Type; +} + +// @public +export type LinkedServiceResource = AzureEntityResource & { + properties: LinkedServiceUnion; +}; + +// @public (undocumented) +export type LinkedServiceUnion = AzureStorageLinkedService | AzureBlobStorageLinkedService | AzureTableStorageLinkedService | AzureSqlDWLinkedService | SqlServerLinkedService | AzureSqlDatabaseLinkedService | AzureSqlMILinkedService | AzureBatchLinkedService | AzureKeyVaultLinkedService | CosmosDbLinkedService | DynamicsLinkedService | DynamicsCrmLinkedService | CommonDataServiceForAppsLinkedService | HDInsightLinkedService | FileServerLinkedService | AzureFileStorageLinkedService | GoogleCloudStorageLinkedService | OracleLinkedService | AzureMySqlLinkedService | MySqlLinkedService | PostgreSqlLinkedService | SybaseLinkedService | Db2LinkedService | TeradataLinkedService | AzureMLLinkedService | AzureMLServiceLinkedService | OdbcLinkedService | InformixLinkedService | MicrosoftAccessLinkedService | HdfsLinkedService | ODataLinkedService | WebLinkedService | CassandraLinkedService | MongoDbLinkedService | MongoDbV2LinkedService | CosmosDbMongoDbApiLinkedService | AzureDataLakeStoreLinkedService | AzureBlobFSLinkedService | Office365LinkedService | SalesforceLinkedService | SalesforceServiceCloudLinkedService | SapCloudForCustomerLinkedService | SapEccLinkedService | SapOpenHubLinkedService | RestServiceLinkedService | AmazonS3LinkedService | AmazonRedshiftLinkedService | CustomDataSourceLinkedService | AzureSearchLinkedService | 
HttpLinkedService | FtpServerLinkedService | SftpServerLinkedService | SapBWLinkedService | SapHanaLinkedService | AmazonMWSLinkedService | AzurePostgreSqlLinkedService | ConcurLinkedService | CouchbaseLinkedService | DrillLinkedService | EloquaLinkedService | GoogleBigQueryLinkedService | GreenplumLinkedService | HBaseLinkedService | HiveLinkedService | HubspotLinkedService | ImpalaLinkedService | JiraLinkedService | MagentoLinkedService | MariaDBLinkedService | AzureMariaDBLinkedService | MarketoLinkedService | PaypalLinkedService | PhoenixLinkedService | PrestoLinkedService | QuickBooksLinkedService | ServiceNowLinkedService | ShopifyLinkedService | SparkLinkedService | SquareLinkedService | XeroLinkedService | ZohoLinkedService | VerticaLinkedService | NetezzaLinkedService | SalesforceMarketingCloudLinkedService | HDInsightOnDemandLinkedService | AzureDataLakeAnalyticsLinkedService | AzureDatabricksLinkedService | ResponsysLinkedService | DynamicsAXLinkedService | OracleServiceCloudLinkedService | GoogleAdWordsLinkedService | SapTableLinkedService | AzureDataExplorerLinkedService | AzureFunctionLinkedService; + +// @public +export interface LogStorageSettings { + [property: string]: any; + linkedServiceName: LinkedServiceReference; + path?: any; +} + +// @public +export type LookupActivity = ExecutionActivity & { + source: CopySourceUnion; + dataset: DatasetReference; + firstRowOnly?: any; +}; + +// @public +export type MagentoLinkedService = LinkedService & { + host: any; + accessToken?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type MagentoObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type MagentoSource = TabularSource & { + query?: any; +}; + +// @public +export interface ManagedIdentity { + readonly principalId?: string; + readonly tenantId?: string; + type?: ResourceIdentityType; +} + +// @public +export type 
ManagedIntegrationRuntime = IntegrationRuntime & { + readonly state?: IntegrationRuntimeState; + computeProperties?: IntegrationRuntimeComputeProperties; + ssisProperties?: IntegrationRuntimeSsisProperties; +}; + +// @public +export interface ManagedVirtualNetworkSettings { + allowedAadTenantIdsForLinking?: string[]; + linkedAccessCheckOnTargetResource?: boolean; + preventDataExfiltration?: boolean; +} + +// @public +export type MappingDataFlow = DataFlow & { + sources?: DataFlowSource[]; + sinks?: DataFlowSink[]; + transformations?: Transformation[]; + script?: string; +}; + +// @public +export type MariaDBLinkedService = LinkedService & { + connectionString?: any; + pwd?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type MariaDBSource = TabularSource & { + query?: any; +}; + +// @public +export type MariaDBTableDataset = Dataset & { + tableName?: any; +}; + +// @public +export type MarketoLinkedService = LinkedService & { + endpoint: any; + clientId: any; + clientSecret?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type MarketoObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type MarketoSource = TabularSource & { + query?: any; +}; + +// @public +export type MicrosoftAccessLinkedService = LinkedService & { + connectionString: any; + authenticationType?: any; + credential?: SecretBaseUnion; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type MicrosoftAccessSink = CopySink & { + preCopyScript?: any; +}; + +// @public +export type MicrosoftAccessSource = CopySource & { + query?: any; +}; + +// @public +export type MicrosoftAccessTableDataset = Dataset & { + tableName?: any; +}; + +// @public +export type MongoDbAuthenticationType = string; + +// @public +export type MongoDbCollectionDataset = Dataset & { + collectionName: any; +}; + 
+// @public +export interface MongoDbCursorMethodsProperties { + [property: string]: any; + limit?: any; + project?: any; + skip?: any; + sort?: any; +} + +// @public +export type MongoDbLinkedService = LinkedService & { + server: any; + authenticationType?: MongoDbAuthenticationType; + databaseName: any; + username?: any; + password?: SecretBaseUnion; + authSource?: any; + port?: any; + enableSsl?: any; + allowSelfSignedServerCert?: any; + encryptedCredential?: any; +}; + +// @public +export type MongoDbSource = CopySource & { + query?: any; +}; + +// @public +export type MongoDbV2CollectionDataset = Dataset & { + collection: any; +}; + +// @public +export type MongoDbV2LinkedService = LinkedService & { + connectionString: any; + database: any; +}; + +// @public +export type MongoDbV2Source = CopySource & { + filter?: any; + cursorMethods?: MongoDbCursorMethodsProperties; + batchSize?: any; + queryTimeout?: any; +}; + +// @public +export type MultiplePipelineTrigger = Trigger & { + pipelines?: TriggerPipelineReference[]; +}; + +// @public (undocumented) +export type MultiplePipelineTriggerUnion = ScheduleTrigger | BlobTrigger | BlobEventsTrigger; + +// @public +export type MySqlLinkedService = LinkedService & { + connectionString: any; + password?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type MySqlSource = TabularSource & { + query?: any; +}; + +// @public +export type MySqlTableDataset = Dataset & { + tableName?: any; +}; + +// @public +export type NetezzaLinkedService = LinkedService & { + connectionString?: any; + pwd?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type NetezzaPartitionOption = string; + +// @public +export interface NetezzaPartitionSettings { + partitionColumnName?: any; + partitionLowerBound?: any; + partitionUpperBound?: any; +} + +// @public +export type NetezzaSource = TabularSource & { + query?: any; + partitionOption?: NetezzaPartitionOption; + 
partitionSettings?: NetezzaPartitionSettings; +}; + +// @public +export type NetezzaTableDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type NodeSize = string; + +// @public +export type NodeSizeFamily = string; + +// @public +export interface Notebook { + [property: string]: any; + bigDataPool?: BigDataPoolReference | null; + cells: NotebookCell[]; + description?: string; + metadata: NotebookMetadata; + nbformat: number; + nbformatMinor: number; + sessionProperties?: NotebookSessionProperties | null; +} + +// @public +export interface NotebookCell { + [property: string]: any; + attachments?: any; + cellType: string; + metadata: any; + outputs?: NotebookCellOutputItem[]; + source: string[]; +} + +// @public +export interface NotebookCellOutputItem { + data?: any; + executionCount?: number; + metadata?: any; + name?: string; + outputType: CellOutputType; + text?: any; +} + +// @public +export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: NotebookResource; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type NotebookGetNotebookResponse = NotebookResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: NotebookResource; + }; +}; + +// @public +export type NotebookGetNotebooksByWorkspaceNextResponse = NotebookListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: NotebookListResponse; + }; +}; + +// @public +export type NotebookGetNotebooksByWorkspaceResponse = NotebookListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: 
NotebookListResponse; + }; +}; + +// @public +export type NotebookGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: NotebookListResponse; + }; +}; + +// @public +export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: NotebookListResponse; + }; +}; + +// @public +export interface NotebookKernelSpec { + [property: string]: any; + displayName: string; + name: string; +} + +// @public +export interface NotebookLanguageInfo { + [property: string]: any; + codemirrorMode?: string; + name: string; +} + +// @public +export interface NotebookListResponse { + nextLink?: string; + value: NotebookResource[]; +} + +// @public +export interface NotebookMetadata { + [property: string]: any; + kernelspec?: NotebookKernelSpec; + languageInfo?: NotebookLanguageInfo; +} + +// @public +export type NotebookReferenceType = string; + +// @public +export interface NotebookResource { + readonly etag?: string; + readonly id?: string; + name: string; + properties: Notebook; + readonly type?: string; +} + +// @public +export interface NotebookSessionProperties { + driverCores: number; + driverMemory: string; + executorCores: number; + executorMemory: string; + numExecutors: number; +} + +// @public +export type ODataAadServicePrincipalCredentialType = string; + +// @public +export type ODataAuthenticationType = string; + +// @public +export type ODataLinkedService = LinkedService & { + url: any; + authenticationType?: ODataAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + tenant?: any; + servicePrincipalId?: any; + aadResourceId?: any; + aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType; + servicePrincipalKey?: SecretBaseUnion; + servicePrincipalEmbeddedCert?: SecretBaseUnion; + servicePrincipalEmbeddedCertPassword?: SecretBaseUnion; + 
encryptedCredential?: any; +}; + +// @public +export type ODataResourceDataset = Dataset & { + path?: any; +}; + +// @public +export type ODataSource = CopySource & { + query?: any; +}; + +// @public +export type OdbcLinkedService = LinkedService & { + connectionString: any; + authenticationType?: any; + credential?: SecretBaseUnion; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type OdbcSink = CopySink & { + preCopyScript?: any; +}; + +// @public +export type OdbcSource = TabularSource & { + query?: any; +}; + +// @public +export type OdbcTableDataset = Dataset & { + tableName?: any; +}; + +// @public +export type Office365Dataset = Dataset & { + tableName: any; + predicate?: any; +}; + +// @public +export type Office365LinkedService = LinkedService & { + office365TenantId: any; + servicePrincipalTenantId: any; + servicePrincipalId: any; + servicePrincipalKey: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type Office365Source = CopySource & { + allowedGroups?: any; + userScopeFilterUri?: any; + dateFilterColumn?: any; + startTime?: any; + endTime?: any; + outputColumns?: any; +}; + +// @public +export type OracleLinkedService = LinkedService & { + connectionString: any; + password?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type OraclePartitionOption = string; + +// @public +export interface OraclePartitionSettings { + partitionColumnName?: any; + partitionLowerBound?: any; + partitionNames?: any; + partitionUpperBound?: any; +} + +// @public +export type OracleServiceCloudLinkedService = LinkedService & { + host: any; + username: any; + password: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type OracleServiceCloudObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type OracleServiceCloudSource = 
TabularSource & { + query?: any; +}; + +// @public +export type OracleSink = CopySink & { + preCopyScript?: any; +}; + +// @public +export type OracleSource = CopySource & { + oracleReaderQuery?: any; + queryTimeout?: any; + partitionOption?: OraclePartitionOption; + partitionSettings?: OraclePartitionSettings; +}; + +// @public +export type OracleTableDataset = Dataset & { + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type OrcCompressionCodec = string; + +// @public +export type OrcDataset = Dataset & { + location?: DatasetLocationUnion; + orcCompressionCodec?: OrcCompressionCodec; +}; + +// @public +export type OrcFormat = DatasetStorageFormat & {}; + +// @public +export type OrcSink = CopySink & { + storeSettings?: StoreWriteSettingsUnion; +}; + +// @public +export type OrcSource = CopySource & { + storeSettings?: StoreReadSettingsUnion; +}; + +// @public +export interface ParameterSpecification { + defaultValue?: any; + type: ParameterType; +} + +// @public +export type ParameterType = string; + +// @public +export type ParquetCompressionCodec = string; + +// @public +export type ParquetDataset = Dataset & { + location?: DatasetLocationUnion; + compressionCodec?: ParquetCompressionCodec; +}; + +// @public +export type ParquetFormat = DatasetStorageFormat & {}; + +// @public +export type ParquetSink = CopySink & { + storeSettings?: StoreWriteSettingsUnion; +}; + +// @public +export type ParquetSource = CopySource & { + storeSettings?: StoreReadSettingsUnion; +}; + +// @public +export type PaypalLinkedService = LinkedService & { + host: any; + clientId: any; + clientSecret?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type PaypalObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type PaypalSource = TabularSource & { + query?: any; +}; + +// @public +export type 
PhoenixAuthenticationType = string; + +// @public +export type PhoenixLinkedService = LinkedService & { + host: any; + port?: any; + httpPath?: any; + authenticationType: PhoenixAuthenticationType; + username?: any; + password?: SecretBaseUnion; + enableSsl?: any; + trustedCertPath?: any; + useSystemTrustStore?: any; + allowHostNameCNMismatch?: any; + allowSelfSignedServerCert?: any; + encryptedCredential?: any; +}; + +// @public +export type PhoenixObjectDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type PhoenixSource = TabularSource & { + query?: any; +}; + +// @public +export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: PipelineResource; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { + isRecovery?: boolean; + parameters?: { + [propertyName: string]: any; + }; + referencePipelineRunId?: string; + startActivityName?: string; +} + +// @public +export type PipelineCreatePipelineRunResponse = CreateRunResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: CreateRunResponse; + }; +}; + +// @public +export interface PipelineFolder { + name?: string; +} + +// @public +export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type PipelineGetPipelineResponse = PipelineResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: PipelineResource; + }; +}; + +// @public +export type PipelineGetPipelinesByWorkspaceNextResponse = PipelineListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: 
PipelineListResponse; + }; +}; + +// @public +export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: PipelineListResponse; + }; +}; + +// @public +export interface PipelineListResponse { + nextLink?: string; + value: PipelineResource[]; +} + +// @public +export interface PipelineReference { + name?: string; + referenceName: string; + type: PipelineReferenceType; +} + +// @public +export type PipelineReferenceType = string; + +// @public +export type PipelineResource = AzureEntityResource & { + [property: string]: any; + description?: string; + activities?: ActivityUnion[]; + parameters?: { + [propertyName: string]: ParameterSpecification; + }; + variables?: { + [propertyName: string]: VariableSpecification; + }; + concurrency?: number; + annotations?: any[]; + runDimensions?: { + [propertyName: string]: any; + }; + folder?: PipelineFolder; +}; + +// @public +export interface PipelineRun { + [property: string]: any; + readonly durationInMs?: number; + readonly invokedBy?: PipelineRunInvokedBy; + readonly isLatest?: boolean; + readonly lastUpdated?: Date; + readonly message?: string; + readonly parameters?: { + [propertyName: string]: string; + }; + readonly pipelineName?: string; + readonly runEnd?: Date; + readonly runGroupId?: string; + readonly runId?: string; + readonly runStart?: Date; + readonly status?: string; +} + +// @public +export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { + isRecursive?: boolean; +} + +// @public +export type PipelineRunGetPipelineRunResponse = PipelineRun & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: PipelineRun; + }; +}; + +// @public +export interface PipelineRunInvokedBy { + readonly id?: string; + readonly invokedByType?: string; + readonly name?: string; +} + +// @public +export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { + 
_response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: ActivityRunsQueryResponse; + }; +}; + +// @public +export type PipelineRunQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: PipelineRunsQueryResponse; + }; +}; + +// @public +export interface PipelineRunsQueryResponse { + continuationToken?: string; + value: PipelineRun[]; +} + +// @public +export type PluginCurrentState = string; + +// @public +export interface PolybaseSettings { + [property: string]: any; + rejectSampleValue?: any; + rejectType?: PolybaseSettingsRejectType; + rejectValue?: any; + useTypeDefault?: any; +} + +// @public +export type PolybaseSettingsRejectType = string; + +// @public +export type PostgreSqlLinkedService = LinkedService & { + connectionString: any; + password?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type PostgreSqlSource = TabularSource & { + query?: any; +}; + +// @public +export type PostgreSqlTableDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type PrestoAuthenticationType = string; + +// @public +export type PrestoLinkedService = LinkedService & { + host: any; + serverVersion: any; + catalog: any; + port?: any; + authenticationType: PrestoAuthenticationType; + username?: any; + password?: SecretBaseUnion; + enableSsl?: any; + trustedCertPath?: any; + useSystemTrustStore?: any; + allowHostNameCNMismatch?: any; + allowSelfSignedServerCert?: any; + timeZoneID?: any; + encryptedCredential?: any; +}; + +// @public +export type PrestoObjectDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type PrestoSource = TabularSource & { + query?: any; +}; + +// @public +export interface PrivateEndpoint { + readonly id?: string; +} + +// @public +export type PrivateEndpointConnection = Resource & { + 
privateEndpoint?: PrivateEndpoint; + privateLinkServiceConnectionState?: PrivateLinkServiceConnectionState; + readonly provisioningState?: string; +}; + +// @public +export interface PrivateLinkServiceConnectionState { + readonly actionsRequired?: string; + description?: string; + status?: string; +} + +// @public +export type ProxyResource = Resource & {}; + +// @public +export interface PurviewConfiguration { + purviewResourceId?: string; +} + +// @public +export interface QueryDataFlowDebugSessionsResponse { + nextLink?: string; + value?: DataFlowDebugSessionInfo[]; +} + +// @public +export type QuickBooksLinkedService = LinkedService & { + endpoint: any; + companyId: any; + consumerKey: any; + consumerSecret: SecretBaseUnion; + accessToken: SecretBaseUnion; + accessTokenSecret: SecretBaseUnion; + useEncryptedEndpoints?: any; + encryptedCredential?: any; +}; + +// @public +export type QuickBooksObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type QuickBooksSource = TabularSource & { + query?: any; +}; + +// @public +export type RecurrenceFrequency = string; + +// @public +export interface RecurrenceSchedule { + [property: string]: any; + hours?: number[]; + minutes?: number[]; + monthDays?: number[]; + monthlyOccurrences?: RecurrenceScheduleOccurrence[]; + weekDays?: DayOfWeek[]; +} + +// @public +export interface RecurrenceScheduleOccurrence { + [property: string]: any; + day?: DayOfWeek; + occurrence?: number; +} + +// @public +export interface RedirectIncompatibleRowSettings { + [property: string]: any; + linkedServiceName: any; + path?: any; +} + +// @public +export interface RedshiftUnloadSettings { + bucketName: any; + s3LinkedServiceName: LinkedServiceReference; +} + +// @public +export type RelationalSource = CopySource & { + query?: any; +}; + +// @public +export type RelationalTableDataset = Dataset & { + tableName?: any; +}; + +// @public +export interface RerunTriggerListResponse { + readonly nextLink?: string; + value: 
RerunTriggerResource[]; +} + +// @public +export type RerunTriggerResource = AzureEntityResource & { + properties: RerunTumblingWindowTrigger; +}; + +// @public +export type RerunTumblingWindowTrigger = Trigger & { + parentTrigger?: any; + requestedStartTime: Date; + requestedEndTime: Date; + maxConcurrency: number; +}; + +// @public +export interface RerunTumblingWindowTriggerActionParameters { + endTime: Date; + maxConcurrency: number; + startTime: Date; +} + +// @public +export interface Resource { + readonly id?: string; + readonly name?: string; + readonly type?: string; +} + +// @public +export type ResourceIdentityType = "None" | "SystemAssigned"; + +// @public +export type ResponsysLinkedService = LinkedService & { + endpoint: any; + clientId: any; + clientSecret?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type ResponsysObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type ResponsysSource = TabularSource & { + query?: any; +}; + +// @public +export type RestResourceDataset = Dataset & { + relativeUrl?: any; + requestMethod?: any; + requestBody?: any; + additionalHeaders?: any; + paginationRules?: any; +}; + +// @public +export type RestServiceAuthenticationType = string; + +// @public +export type RestServiceLinkedService = LinkedService & { + url: any; + enableServerCertificateValidation?: any; + authenticationType: RestServiceAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + aadResourceId?: any; + encryptedCredential?: any; +}; + +// @public +export type RestSource = CopySource & { + requestMethod?: any; + requestBody?: any; + additionalHeaders?: any; + paginationRules?: any; + httpRequestTimeout?: any; + requestInterval?: any; +}; + +// @public +export interface RetryPolicy { + count?: any; + 
intervalInSeconds?: number; +} + +// @public +export interface RunFilterParameters { + continuationToken?: string; + filters?: RunQueryFilter[]; + lastUpdatedAfter: Date; + lastUpdatedBefore: Date; + orderBy?: RunQueryOrderBy[]; +} + +// @public +export interface RunQueryFilter { + operand: RunQueryFilterOperand; + operator: RunQueryFilterOperator; + values: string[]; +} + +// @public +export type RunQueryFilterOperand = string; + +// @public +export type RunQueryFilterOperator = string; + +// @public +export type RunQueryOrder = string; + +// @public +export interface RunQueryOrderBy { + order: RunQueryOrder; + orderBy: RunQueryOrderByField; +} + +// @public +export type RunQueryOrderByField = string; + +// @public +export type SalesforceLinkedService = LinkedService & { + environmentUrl?: any; + username?: any; + password?: SecretBaseUnion; + securityToken?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SalesforceMarketingCloudLinkedService = LinkedService & { + clientId: any; + clientSecret?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type SalesforceMarketingCloudObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type SalesforceMarketingCloudSource = TabularSource & { + query?: any; +}; + +// @public +export type SalesforceObjectDataset = Dataset & { + objectApiName?: any; +}; + +// @public +export type SalesforceServiceCloudLinkedService = LinkedService & { + environmentUrl?: any; + username?: any; + password?: SecretBaseUnion; + securityToken?: SecretBaseUnion; + extendedProperties?: any; + encryptedCredential?: any; +}; + +// @public +export type SalesforceServiceCloudObjectDataset = Dataset & { + objectApiName?: any; +}; + +// @public +export type SalesforceServiceCloudSink = CopySink & { + writeBehavior?: SalesforceSinkWriteBehavior; + externalIdFieldName?: any; + ignoreNullValues?: any; 
+}; + +// @public +export type SalesforceServiceCloudSource = CopySource & { + query?: any; + readBehavior?: SalesforceSourceReadBehavior; +}; + +// @public +export type SalesforceSink = CopySink & { + writeBehavior?: SalesforceSinkWriteBehavior; + externalIdFieldName?: any; + ignoreNullValues?: any; +}; + +// @public +export type SalesforceSinkWriteBehavior = string; + +// @public +export type SalesforceSource = TabularSource & { + query?: any; + readBehavior?: SalesforceSourceReadBehavior; +}; + +// @public +export type SalesforceSourceReadBehavior = string; + +// @public +export type SapBwCubeDataset = Dataset & {}; + +// @public +export type SapBWLinkedService = LinkedService & { + server: any; + systemNumber: any; + clientId: any; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SapBwSource = TabularSource & { + query?: any; +}; + +// @public +export type SapCloudForCustomerLinkedService = LinkedService & { + url: any; + username?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SapCloudForCustomerResourceDataset = Dataset & { + path: any; +}; + +// @public +export type SapCloudForCustomerSink = CopySink & { + writeBehavior?: SapCloudForCustomerSinkWriteBehavior; +}; + +// @public +export type SapCloudForCustomerSinkWriteBehavior = string; + +// @public +export type SapCloudForCustomerSource = TabularSource & { + query?: any; +}; + +// @public +export type SapEccLinkedService = LinkedService & { + url: string; + username?: string; + password?: SecretBaseUnion; + encryptedCredential?: string; +}; + +// @public +export type SapEccResourceDataset = Dataset & { + path: any; +}; + +// @public +export type SapEccSource = TabularSource & { + query?: any; +}; + +// @public +export type SapHanaAuthenticationType = string; + +// @public +export type SapHanaLinkedService = LinkedService & { + connectionString?: any; + server: any; + authenticationType?: 
SapHanaAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SapHanaPartitionOption = string; + +// @public +export interface SapHanaPartitionSettings { + partitionColumnName?: any; +} + +// @public +export type SapHanaSource = TabularSource & { + query?: any; + packetSize?: any; + partitionOption?: SapHanaPartitionOption; + partitionSettings?: SapHanaPartitionSettings; +}; + +// @public +export type SapHanaTableDataset = Dataset & { + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type SapOpenHubLinkedService = LinkedService & { + server: any; + systemNumber: any; + clientId: any; + language?: any; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SapOpenHubSource = TabularSource & { + excludeLastRequest?: any; + baseRequestId?: any; +}; + +// @public +export type SapOpenHubTableDataset = Dataset & { + openHubDestinationName: any; + excludeLastRequest?: any; + baseRequestId?: any; +}; + +// @public +export type SapTableLinkedService = LinkedService & { + server?: any; + systemNumber?: any; + clientId?: any; + language?: any; + systemId?: any; + userName?: any; + password?: SecretBaseUnion; + messageServer?: any; + messageServerService?: any; + sncMode?: any; + sncMyName?: any; + sncPartnerName?: any; + sncLibraryPath?: any; + sncQop?: any; + logonGroup?: any; + encryptedCredential?: any; +}; + +// @public +export type SapTablePartitionOption = string; + +// @public +export interface SapTablePartitionSettings { + maxPartitionsNumber?: any; + partitionColumnName?: any; + partitionLowerBound?: any; + partitionUpperBound?: any; +} + +// @public +export type SapTableResourceDataset = Dataset & { + tableName: any; +}; + +// @public +export type SapTableSource = TabularSource & { + rowCount?: any; + rowSkips?: any; + rfcTableFields?: any; + rfcTableOptions?: any; + batchSize?: any; + 
customRfcReadTableFunctionModule?: any; + partitionOption?: SapTablePartitionOption; + partitionSettings?: SapTablePartitionSettings; +}; + +// @public +export type SchedulerCurrentState = string; + +// @public +export type ScheduleTrigger = MultiplePipelineTrigger & { + recurrence: ScheduleTriggerRecurrence; +}; + +// @public +export interface ScheduleTriggerRecurrence { + [property: string]: any; + endTime?: Date; + frequency?: RecurrenceFrequency; + interval?: number; + schedule?: RecurrenceSchedule; + startTime?: Date; + timeZone?: string; +} + +// @public +export interface ScriptAction { + name: string; + parameters?: string; + roles: HdiNodeTypes; + uri: string; +} + +// @public +export interface SecretBase { + type: "SecureString" | "AzureKeyVaultSecret"; +} + +// @public (undocumented) +export type SecretBaseUnion = SecureString | AzureKeyVaultSecretReference; + +// @public +export type SecureString = SecretBase & { + value: string; +}; + +// @public +export type SelfDependencyTumblingWindowTriggerReference = DependencyReference & { + offset: string; + size?: string; +}; + +// @public +export type SelfHostedIntegrationRuntime = IntegrationRuntime & { + linkedInfo?: LinkedIntegrationRuntimeTypeUnion; +}; + +// @public +export type ServiceNowAuthenticationType = string; + +// @public +export type ServiceNowLinkedService = LinkedService & { + endpoint: any; + authenticationType: ServiceNowAuthenticationType; + username?: any; + password?: SecretBaseUnion; + clientId?: any; + clientSecret?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type ServiceNowObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type ServiceNowSource = TabularSource & { + query?: any; +}; + +// @public +export type SetVariableActivity = Activity & { + variableName?: string; + value?: any; +}; + +// @public +export type SftpAuthenticationType = string; + 
+// @public +export type SftpLocation = DatasetLocation & {}; + +// @public +export type SftpReadSettings = StoreReadSettings & { + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type SftpServerLinkedService = LinkedService & { + host: any; + port?: any; + authenticationType?: SftpAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; + privateKeyPath?: any; + privateKeyContent?: SecretBaseUnion; + passPhrase?: SecretBaseUnion; + skipHostKeyValidation?: any; + hostKeyFingerprint?: any; +}; + +// @public +export type SftpWriteSettings = StoreWriteSettings & { + operationTimeout?: any; +}; + +// @public +export type ShopifyLinkedService = LinkedService & { + host: any; + accessToken?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type ShopifyObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type ShopifySource = TabularSource & { + query?: any; +}; + +// @public +export interface Sku { + capacity?: number; + name?: string; + tier?: string; +} + +// @public +export type SparkAuthenticationType = string; + +// @public (undocumented) +export interface SparkBatchJob { + appId?: string | null; + appInfo?: { + [propertyName: string]: string; + } | null; + artifactId?: string; + errors?: SparkServiceError[]; + id: number; + jobType?: SparkJobType; + // (undocumented) + livyInfo?: SparkBatchJobState; + logLines?: string[] | null; + name?: string; + plugin?: SparkServicePlugin; + result?: SparkBatchJobResultType; + scheduler?: SparkScheduler; + sparkPoolName?: string; + state?: string; + submitterId?: string; + submitterName?: string; + tags?: { + [propertyName: string]: string; + }; + workspaceName?: string; +} + +// @public +export type SparkBatchJobResultType = string; + +// @public 
(undocumented) +export interface SparkBatchJobState { + currentState?: string; + deadAt?: Date | null; + // (undocumented) + jobCreationRequest?: SparkRequest; + notStartedAt?: Date | null; + recoveringAt?: Date | null; + runningAt?: Date | null; + startingAt?: Date | null; + successAt?: Date | null; + terminatedAt?: Date | null; +} + +// @public +export type SparkErrorSource = string; + +// @public +export interface SparkJobDefinition { + [property: string]: any; + description?: string; + jobProperties: SparkJobProperties; + language?: string; + requiredSparkVersion?: string; + targetBigDataPool: BigDataPoolReference; +} + +// @public +export interface SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkJobDefinitionResource; + }; +}; + +// @public +export type SparkJobDefinitionDebugSparkJobDefinitionResponse = SparkBatchJob & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkBatchJob; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export type SparkJobDefinitionExecuteSparkJobDefinitionResponse = SparkBatchJob & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkBatchJob; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface SparkJobDefinitionGetSparkJobDefinitionOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type SparkJobDefinitionGetSparkJobDefinitionResponse = SparkJobDefinitionResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkJobDefinitionResource; + }; +}; + +// @public +export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse & { + _response: 
coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkJobDefinitionsListResponse; + }; +}; + +// @public +export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkJobDefinitionsListResponse; + }; +}; + +// @public +export type SparkJobDefinitionResource = AzureEntityResource & { + properties: SparkJobDefinition; +}; + +// @public +export interface SparkJobDefinitionsListResponse { + nextLink?: string; + value: SparkJobDefinitionResource[]; +} + +// @public +export interface SparkJobProperties { + [property: string]: any; + archives?: string[]; + args?: string[]; + className?: string; + conf?: any; + driverCores: number; + driverMemory: string; + executorCores: number; + executorMemory: string; + file: string; + files?: string[]; + jars?: string[]; + name?: string; + numExecutors: number; +} + +// @public +export type SparkJobReferenceType = string; + +// @public +export type SparkJobType = string; + +// @public +export type SparkLinkedService = LinkedService & { + host: any; + port: any; + serverType?: SparkServerType; + thriftTransportProtocol?: SparkThriftTransportProtocol; + authenticationType: SparkAuthenticationType; + username?: any; + password?: SecretBaseUnion; + httpPath?: any; + enableSsl?: any; + trustedCertPath?: any; + useSystemTrustStore?: any; + allowHostNameCNMismatch?: any; + allowSelfSignedServerCert?: any; + encryptedCredential?: any; +}; + +// @public +export type SparkObjectDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public (undocumented) +export interface SparkRequest { + // (undocumented) + archives?: string[]; + // (undocumented) + arguments?: string[]; + // (undocumented) + className?: string; + configuration?: { + [propertyName: string]: string; + }; + // (undocumented) + driverCores?: number; + // (undocumented) + driverMemory?: string; 
+ // (undocumented) + executorCores?: number; + // (undocumented) + executorCount?: number; + // (undocumented) + executorMemory?: string; + // (undocumented) + file?: string; + // (undocumented) + files?: string[]; + // (undocumented) + jars?: string[]; + // (undocumented) + name?: string; + // (undocumented) + pythonFiles?: string[]; +} + +// @public (undocumented) +export interface SparkScheduler { + // (undocumented) + cancellationRequestedAt?: Date; + // (undocumented) + currentState?: SchedulerCurrentState; + // (undocumented) + endedAt?: Date | null; + // (undocumented) + scheduledAt?: Date | null; + // (undocumented) + submittedAt?: Date | null; +} + +// @public +export type SparkServerType = string; + +// @public (undocumented) +export interface SparkServiceError { + // (undocumented) + errorCode?: string; + // (undocumented) + message?: string; + // (undocumented) + source?: SparkErrorSource; +} + +// @public (undocumented) +export interface SparkServicePlugin { + // (undocumented) + cleanupStartedAt?: Date | null; + // (undocumented) + currentState?: PluginCurrentState; + // (undocumented) + monitoringStartedAt?: Date | null; + // (undocumented) + preparationStartedAt?: Date | null; + // (undocumented) + resourceAcquisitionStartedAt?: Date | null; + // (undocumented) + submissionStartedAt?: Date | null; +} + +// @public +export type SparkSource = TabularSource & { + query?: any; +}; + +// @public +export type SparkThriftTransportProtocol = string; + +// @public +export interface SqlConnection { + [property: string]: any; + name: string; + type: SqlConnectionType; +} + +// @public +export type SqlConnectionType = string; + +// @public +export type SqlDWSink = CopySink & { + preCopyScript?: any; + allowPolyBase?: any; + polyBaseSettings?: PolybaseSettings; + allowCopyCommand?: any; + copyCommandSettings?: DWCopyCommandSettings; + tableOption?: any; +}; + +// @public +export type SqlDWSource = TabularSource & { + sqlReaderQuery?: any; + 
sqlReaderStoredProcedureName?: any; + storedProcedureParameters?: any; +}; + +// @public +export type SqlMISink = CopySink & { + sqlWriterStoredProcedureName?: any; + sqlWriterTableType?: any; + preCopyScript?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + storedProcedureTableTypeParameterName?: any; + tableOption?: any; +}; + +// @public +export type SqlMISource = TabularSource & { + sqlReaderQuery?: any; + sqlReaderStoredProcedureName?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + produceAdditionalTypes?: any; +}; + +// @public +export type SqlPool = TrackedResource & { + sku?: Sku; + maxSizeBytes?: number; + collation?: string; + sourceDatabaseId?: string; + recoverableDatabaseId?: string; + provisioningState?: string; + status?: string; + restorePointInTime?: string; + createMode?: string; + creationDate?: Date; +}; + +// @public +export interface SqlPoolInfoListResult { + nextLink?: string; + value?: SqlPool[]; +} + +// @public +export interface SqlPoolReference { + referenceName: string; + type: SqlPoolReferenceType; +} + +// @public +export type SqlPoolReferenceType = string; + +// @public +export type SqlPoolsGetResponse = SqlPool & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SqlPool; + }; +}; + +// @public +export type SqlPoolsListResponse = SqlPoolInfoListResult & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SqlPoolInfoListResult; + }; +}; + +// @public +export type SqlPoolStoredProcedureActivity = Activity & { + sqlPool: SqlPoolReference; + storedProcedureName: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; +}; + +// @public +export interface SqlScript { + [property: string]: any; + content: SqlScriptContent; + description?: string; + type?: SqlScriptType; +} + +// @public +export interface SqlScriptContent { + [property: string]: any; + 
currentConnection: SqlConnection; + metadata?: SqlScriptMetadata; + query: string; +} + +// @public +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SqlScriptResource; + }; +}; + +// @public +export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type SqlScriptGetSqlScriptResponse = SqlScriptResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SqlScriptResource; + }; +}; + +// @public +export type SqlScriptGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SqlScriptsListResponse; + }; +}; + +// @public +export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SqlScriptsListResponse; + }; +}; + +// @public +export interface SqlScriptMetadata { + [property: string]: any; + language?: string; +} + +// @public +export interface SqlScriptResource { + readonly etag?: string; + readonly id?: string; + name: string; + properties: SqlScript; + readonly type?: string; +} + +// @public +export interface SqlScriptsListResponse { + nextLink?: string; + value: SqlScriptResource[]; +} + +// @public +export type SqlScriptType = string; + +// @public +export type SqlServerLinkedService = LinkedService & { + connectionString: any; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SqlServerSink = CopySink & { + sqlWriterStoredProcedureName?: any; + sqlWriterTableType?: any; + preCopyScript?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + 
storedProcedureTableTypeParameterName?: any; + tableOption?: any; +}; + +// @public +export type SqlServerSource = TabularSource & { + sqlReaderQuery?: any; + sqlReaderStoredProcedureName?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + produceAdditionalTypes?: any; +}; + +// @public +export type SqlServerStoredProcedureActivity = ExecutionActivity & { + storedProcedureName: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; +}; + +// @public +export type SqlServerTableDataset = Dataset & { + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type SqlSink = CopySink & { + sqlWriterStoredProcedureName?: any; + sqlWriterTableType?: any; + preCopyScript?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + storedProcedureTableTypeParameterName?: any; + tableOption?: any; +}; + +// @public +export type SqlSource = TabularSource & { + sqlReaderQuery?: any; + sqlReaderStoredProcedureName?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; +}; + +// @public +export type SquareLinkedService = LinkedService & { + host: any; + clientId: any; + clientSecret?: SecretBaseUnion; + redirectUri: any; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type SquareObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type SquareSource = TabularSource & { + query?: any; +}; + +// @public +export interface SsisAccessCredential { + domain: any; + password: SecretBaseUnion; + userName: any; +} + +// @public +export interface SsisChildPackage { + packageContent: any; + packageLastModifiedDate?: string; + packageName?: string; + packagePath: any; +} + +// @public +export interface SsisExecutionCredential { + domain: any; + password: SecureString; + userName: 
any; +} + +// @public +export interface SsisExecutionParameter { + value: any; +} + +// @public +export interface SsisLogLocation { + accessCredential?: SsisAccessCredential; + logPath: any; + logRefreshInterval?: any; + type: SsisLogLocationType; +} + +// @public +export type SsisLogLocationType = string; + +// @public +export interface SsisObjectMetadataStatusResponse { + error?: string; + name?: string; + properties?: string; + status?: string; +} + +// @public +export interface SsisPackageLocation { + accessCredential?: SsisAccessCredential; + childPackages?: SsisChildPackage[]; + configurationPath?: any; + packageContent?: any; + packageLastModifiedDate?: string; + packageName?: string; + packagePassword?: SecretBaseUnion; + packagePath?: any; + type?: SsisPackageLocationType; +} + +// @public +export type SsisPackageLocationType = string; + +// @public +export interface SsisPropertyOverride { + isSensitive?: boolean; + value: any; +} + +// @public +export interface StagingSettings { + [property: string]: any; + enableCompression?: any; + linkedServiceName: LinkedServiceReference; + path?: any; +} + +// @public +export interface StartDataFlowDebugSessionRequest { + dataFlow?: DataFlowResource; + datasets?: DatasetResource[]; + debugSettings?: any; + incrementalDebug?: boolean; + linkedServices?: LinkedServiceResource[]; + sessionId?: string; + staging?: any; +} + +// @public +export interface StartDataFlowDebugSessionResponse { + jobVersion?: string; +} + +// @public +export interface StoredProcedureParameter { + type?: StoredProcedureParameterType; + value?: any; +} + +// @public +export type StoredProcedureParameterType = string; + +// @public +export interface StoreReadSettings { + [property: string]: any; + maxConcurrentConnections?: any; + type: "AzureBlobStorageReadSettings" | "AzureBlobFSReadSettings" | "AzureDataLakeStoreReadSettings" | "AmazonS3ReadSettings" | "FileServerReadSettings" | "AzureFileStorageReadSettings" | "GoogleCloudStorageReadSettings" 
| "FtpReadSettings" | "SftpReadSettings" | "HttpReadSettings" | "HdfsReadSettings"; +} + +// @public (undocumented) +export type StoreReadSettingsUnion = AzureBlobStorageReadSettings | AzureBlobFSReadSettings | AzureDataLakeStoreReadSettings | AmazonS3ReadSettings | FileServerReadSettings | AzureFileStorageReadSettings | GoogleCloudStorageReadSettings | FtpReadSettings | SftpReadSettings | HttpReadSettings | HdfsReadSettings; + +// @public +export interface StoreWriteSettings { + [property: string]: any; + copyBehavior?: any; + maxConcurrentConnections?: any; + type: "SftpWriteSettings" | "AzureBlobStorageWriteSettings" | "AzureBlobFSWriteSettings" | "AzureDataLakeStoreWriteSettings" | "FileServerWriteSettings"; +} + +// @public (undocumented) +export type StoreWriteSettingsUnion = SftpWriteSettings | AzureBlobStorageWriteSettings | AzureBlobFSWriteSettings | AzureDataLakeStoreWriteSettings | FileServerWriteSettings; + +// @public +export type SubResource = AzureEntityResource & {}; + +// @public +export interface SubResourceDebugResource { + name?: string; +} + +// @public +export type SwitchActivity = Activity & { + on: Expression; + cases?: SwitchCase[]; + defaultActivities?: ActivityUnion[]; +}; + +// @public +export interface SwitchCase { + activities?: ActivityUnion[]; + value?: string; +} + +// @public +export type SybaseAuthenticationType = string; + +// @public +export type SybaseLinkedService = LinkedService & { + server: any; + database: any; + schema?: any; + authenticationType?: SybaseAuthenticationType; + username?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SybaseSource = TabularSource & { + query?: any; +}; + +// @public +export type SybaseTableDataset = Dataset & { + tableName?: any; +}; + +// @public +export type SynapseNotebookActivity = Activity & { + notebook: SynapseNotebookReference; + parameters?: { + [propertyName: string]: any; + }; +}; + +// @public +export interface 
SynapseNotebookReference { + referenceName: string; + type: NotebookReferenceType; +} + +// @public +export type SynapseSparkJobDefinitionActivity = Activity & { + sparkJob: SynapseSparkJobReference; +}; + +// @public +export interface SynapseSparkJobReference { + referenceName: string; + type: SparkJobReferenceType; +} + +// @public +export type TabularSource = CopySource & { + queryTimeout?: any; +}; + +// @public (undocumented) +export type TabularSourceUnion = AzureTableSource | InformixSource | Db2Source | OdbcSource | MySqlSource | PostgreSqlSource | SybaseSource | SapBwSource | SalesforceSource | SapCloudForCustomerSource | SapEccSource | SapHanaSource | SapOpenHubSource | SapTableSource | SqlSource | SqlServerSource | AzureSqlSource | SqlMISource | SqlDWSource | AzureMySqlSource | TeradataSource | CassandraSource | AmazonMWSSource | AzurePostgreSqlSource | ConcurSource | CouchbaseSource | DrillSource | EloquaSource | GoogleBigQuerySource | GreenplumSource | HBaseSource | HiveSource | HubspotSource | ImpalaSource | JiraSource | MagentoSource | MariaDBSource | AzureMariaDBSource | MarketoSource | PaypalSource | PhoenixSource | PrestoSource | QuickBooksSource | ServiceNowSource | ShopifySource | SparkSource | SquareSource | XeroSource | ZohoSource | NetezzaSource | VerticaSource | SalesforceMarketingCloudSource | ResponsysSource | DynamicsAXSource | OracleServiceCloudSource | GoogleAdWordsSource | AmazonRedshiftSource; + +// @public +export type TabularTranslator = CopyTranslator & { + columnMappings?: any; + schemaMapping?: any; + collectionReference?: any; + mapComplexValuesToString?: any; + mappings?: any; +}; + +// @public +export type TeradataAuthenticationType = string; + +// @public +export type TeradataLinkedService = LinkedService & { + connectionString?: any; + server?: any; + authenticationType?: TeradataAuthenticationType; + username?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type 
TeradataPartitionOption = string; + +// @public +export interface TeradataPartitionSettings { + partitionColumnName?: any; + partitionLowerBound?: any; + partitionUpperBound?: any; +} + +// @public +export type TeradataSource = TabularSource & { + query?: any; + partitionOption?: TeradataPartitionOption; + partitionSettings?: TeradataPartitionSettings; +}; + +// @public +export type TeradataTableDataset = Dataset & { + database?: any; + table?: any; +}; + +// @public +export type TextFormat = DatasetStorageFormat & { + columnDelimiter?: any; + rowDelimiter?: any; + escapeChar?: any; + quoteChar?: any; + nullValue?: any; + encodingName?: any; + treatEmptyAsNull?: any; + skipLineCount?: any; + firstRowAsHeader?: any; +}; + +// @public +export type TrackedResource = Resource & { + tags?: { + [propertyName: string]: string; + }; + location: string; +}; + +// @public +export interface Transformation { + description?: string; + name: string; +} + +// @public +export interface Trigger { + [property: string]: any; + annotations?: any[]; + description?: string; + readonly runtimeState?: TriggerRuntimeState; + type: "RerunTumblingWindowTrigger" | "MultiplePipelineTrigger" | "ScheduleTrigger" | "BlobTrigger" | "BlobEventsTrigger" | "TumblingWindowTrigger" | "ChainingTrigger"; +} + +// @public +export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: TriggerResource; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface TriggerDependencyProvisioningStatus { + provisioningStatus: string; + triggerName: string; +} + +// @public +export type TriggerDependencyReference = DependencyReference & { + referenceTrigger: TriggerReference; +}; + +// @public (undocumented) +export type TriggerDependencyReferenceUnion = 
TumblingWindowTriggerDependencyReference; + +// @public +export type TriggerGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: TriggerSubscriptionOperationStatus; + }; +}; + +// @public +export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type TriggerGetTriggerResponse = TriggerResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: TriggerResource; + }; +}; + +// @public +export type TriggerGetTriggersByWorkspaceNextResponse = TriggerListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: TriggerListResponse; + }; +}; + +// @public +export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: TriggerListResponse; + }; +}; + +// @public +export interface TriggerListResponse { + nextLink?: string; + value: TriggerResource[]; +} + +// @public +export interface TriggerPipelineReference { + parameters?: { + [propertyName: string]: any; + }; + pipelineReference?: PipelineReference; +} + +// @public +export interface TriggerReference { + referenceName: string; + type: TriggerReferenceType; +} + +// @public +export type TriggerReferenceType = string; + +// @public +export type TriggerResource = AzureEntityResource & { + properties: TriggerUnion; +}; + +// @public +export interface TriggerRun { + [property: string]: any; + readonly message?: string; + readonly properties?: { + [propertyName: string]: string; + }; + readonly status?: TriggerRunStatus; + readonly triggeredPipelines?: { + [propertyName: string]: string; + }; + readonly triggerName?: string; + readonly triggerRunId?: string; + readonly triggerRunTimestamp?: Date; + readonly triggerType?: string; +} + +// @public +export type TriggerRunQueryTriggerRunsByWorkspaceResponse = 
TriggerRunsQueryResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: TriggerRunsQueryResponse; + }; +}; + +// @public +export interface TriggerRunsQueryResponse { + continuationToken?: string; + value: TriggerRun[]; +} + +// @public +export type TriggerRunStatus = string; + +// @public +export type TriggerRuntimeState = string; + +// @public +export type TriggerSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: TriggerSubscriptionOperationStatus; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface TriggerSubscriptionOperationStatus { + readonly status?: EventSubscriptionStatus; + readonly triggerName?: string; +} + +// @public (undocumented) +export type TriggerUnion = RerunTumblingWindowTrigger | MultiplePipelineTriggerUnion | TumblingWindowTrigger | ChainingTrigger; + +// @public +export type TriggerUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: TriggerSubscriptionOperationStatus; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export type TumblingWindowFrequency = string; + +// @public +export type TumblingWindowTrigger = Trigger & { + pipeline: TriggerPipelineReference; + frequency: TumblingWindowFrequency; + interval: number; + startTime: Date; + endTime?: Date; + delay?: any; + maxConcurrency: number; + retryPolicy?: RetryPolicy; + dependsOn?: DependencyReferenceUnion[]; +}; + +// @public +export type TumblingWindowTriggerDependencyReference = TriggerDependencyReference & { + offset?: string; + size?: string; +}; + +// @public +export type Type = string; + +// @public +export type UntilActivity = Activity & { + expression: Expression; + timeout?: any; + activities: ActivityUnion[]; +}; + +// @public +export interface UserProperty { + name: string; + value: any; +} + +// @public +export type 
ValidationActivity = Activity & { + timeout?: any; + sleep?: any; + minimumSize?: any; + childItems?: any; + dataset: DatasetReference; +}; + +// @public +export interface VariableSpecification { + defaultValue?: any; + type: VariableType; +} + +// @public +export type VariableType = string; + +// @public +export type VerticaLinkedService = LinkedService & { + connectionString?: any; + pwd?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type VerticaSource = TabularSource & { + query?: any; +}; + +// @public +export type VerticaTableDataset = Dataset & { + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export interface VirtualNetworkProfile { + computeSubnetId?: string; +} + +// @public +export type WaitActivity = Activity & { + waitTimeInSeconds: number; +}; + +// @public +export type WebActivity = ExecutionActivity & { + method: WebActivityMethod; + url: any; + headers?: any; + body?: any; + authentication?: WebActivityAuthentication; + datasets?: DatasetReference[]; + linkedServices?: LinkedServiceReference[]; + connectVia?: IntegrationRuntimeReference; +}; + +// @public +export interface WebActivityAuthentication { + password?: SecretBaseUnion; + pfx?: SecretBaseUnion; + resource?: string; + type: string; + username?: string; +} + +// @public +export type WebActivityMethod = string; + +// @public +export type WebAnonymousAuthentication = WebLinkedServiceTypeProperties & {}; + +// @public +export type WebAuthenticationType = string; + +// @public +export type WebBasicAuthentication = WebLinkedServiceTypeProperties & { + username: any; + password: SecretBaseUnion; +}; + +// @public +export type WebClientCertificateAuthentication = WebLinkedServiceTypeProperties & { + pfx: SecretBaseUnion; + password: SecretBaseUnion; +}; + +// @public +export type WebHookActivity = Activity & { + method: WebHookActivityMethod; + url: any; + timeout?: string; + headers?: any; + body?: any; + 
authentication?: WebActivityAuthentication; + reportStatusOnCallBack?: any; +}; + +// @public +export type WebHookActivityMethod = string; + +// @public +export type WebLinkedService = LinkedService & { + typeProperties: WebLinkedServiceTypePropertiesUnion; +}; + +// @public +export interface WebLinkedServiceTypeProperties { + authenticationType: "Anonymous" | "Basic" | "ClientCertificate"; + url: any; +} + +// @public (undocumented) +export type WebLinkedServiceTypePropertiesUnion = WebAnonymousAuthentication | WebBasicAuthentication | WebClientCertificateAuthentication; + +// @public +export type WebSource = CopySource & {}; + +// @public +export type WebTableDataset = Dataset & { + index: any; + path?: any; +}; + +// @public +export type Workspace = TrackedResource & { + identity?: ManagedIdentity; + defaultDataLakeStorage?: DataLakeStorageAccountDetails; + sqlAdministratorLoginPassword?: string; + managedResourceGroupName?: string; + readonly provisioningState?: string; + sqlAdministratorLogin?: string; + virtualNetworkProfile?: VirtualNetworkProfile; + connectivityEndpoints?: { + [propertyName: string]: string; + }; + managedVirtualNetwork?: string; + privateEndpointConnections?: PrivateEndpointConnection[]; + encryption?: EncryptionDetails; + readonly workspaceUID?: string; + readonly extraProperties?: { + [propertyName: string]: any; + }; + managedVirtualNetworkSettings?: ManagedVirtualNetworkSettings; + workspaceRepositoryConfiguration?: WorkspaceRepositoryConfiguration; + purviewConfiguration?: PurviewConfiguration; +}; + +// @public +export type WorkspaceGetResponse = Workspace & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: Workspace; + }; +}; + +// @public +export interface WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams extends coreHttp.OperationOptions { + clientRequestId?: string; +} + +// @public +export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAccessTokenResponse & { + 
_response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: GitHubAccessTokenResponse; + }; +}; + +// @public +export interface WorkspaceIdentity { + readonly principalId?: string; + readonly tenantId?: string; + type: "SystemAssigned"; +} + +// @public +export interface WorkspaceKeyDetails { + keyVaultUrl?: string; + name?: string; +} + +// @public +export interface WorkspaceRepositoryConfiguration { + accountName?: string; + collaborationBranch?: string; + hostName?: string; + projectName?: string; + repositoryName?: string; + rootFolder?: string; + type?: string; +} + +// @public +export interface WorkspaceUpdateParameters { + identity?: WorkspaceIdentity; + tags?: { + [propertyName: string]: string; + }; +} + +// @public +export type XeroLinkedService = LinkedService & { + host: any; + consumerKey?: SecretBaseUnion; + privateKey?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type XeroObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type XeroSource = TabularSource & { + query?: any; +}; + +// @public +export type ZohoLinkedService = LinkedService & { + endpoint: any; + accessToken?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type ZohoObjectDataset = Dataset & { + tableName?: any; +}; + +// @public +export type ZohoSource = TabularSource & { + query?: any; +}; + + +// Warnings were encountered during analysis: +// +// src/models/index.ts:15209:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts + +// (No @packageDocumentation comment for this package) + +``` diff --git a/sdk/synapse/synapse-artifacts/rollup.config.js b/sdk/synapse/synapse-artifacts/rollup.config.js new file mode 100644 index 000000000000..1cc609990f10 --- /dev/null +++ 
b/sdk/synapse/synapse-artifacts/rollup.config.js @@ -0,0 +1,31 @@ +import rollup from "rollup"; +import nodeResolve from "rollup-plugin-node-resolve"; +import sourcemaps from "rollup-plugin-sourcemaps"; + +/** + * @type {rollup.RollupFileOptions} + */ +const config = { + input: "./esm/artifactsClient.js", + external: ["@azure/core-http", "@azure/core-arm"], + output: { + file: "./dist/synapse-artifacts.js", + format: "umd", + name: "Azure.SynapseArtifacts", + sourcemap: true, + globals: { + "@azure/core-http": "coreHttp", + "@azure/core-arm": "coreArm" + }, + banner: `/* + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ ` + }, + plugins: [nodeResolve({ module: true }), sourcemaps()] +}; + +export default config; diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClient.ts b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts new file mode 100644 index 000000000000..cc724b6d34d4 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts @@ -0,0 +1,71 @@ +import * as coreHttp from "@azure/core-http"; +import { + LinkedService, + Dataset, + Pipeline, + PipelineRun, + Trigger, + TriggerRun, + DataFlow, + DataFlowDebugSession, + SqlScript, + SparkJobDefinition, + Notebook, + Workspace, + SqlPools, + BigDataPools, + IntegrationRuntimes, + WorkspaceGitRepoManagement +} from "./operations"; +import { ArtifactsClientContext } from "./artifactsClientContext"; +import { ArtifactsClientOptionalParams } from "./models"; + +export class ArtifactsClient extends ArtifactsClientContext { + /** + * Initializes a new instance of the ArtifactsClient class. + * @param credentials Subscription credentials which uniquely identify client subscription. 
+ * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: ArtifactsClientOptionalParams + ) { + super(credentials, endpoint, options); + this.linkedService = new LinkedService(this); + this.dataset = new Dataset(this); + this.pipeline = new Pipeline(this); + this.pipelineRun = new PipelineRun(this); + this.trigger = new Trigger(this); + this.triggerRun = new TriggerRun(this); + this.dataFlow = new DataFlow(this); + this.dataFlowDebugSession = new DataFlowDebugSession(this); + this.sqlScript = new SqlScript(this); + this.sparkJobDefinition = new SparkJobDefinition(this); + this.notebook = new Notebook(this); + this.workspace = new Workspace(this); + this.sqlPools = new SqlPools(this); + this.bigDataPools = new BigDataPools(this); + this.integrationRuntimes = new IntegrationRuntimes(this); + this.workspaceGitRepoManagement = new WorkspaceGitRepoManagement(this); + } + + linkedService: LinkedService; + dataset: Dataset; + pipeline: Pipeline; + pipelineRun: PipelineRun; + trigger: Trigger; + triggerRun: TriggerRun; + dataFlow: DataFlow; + dataFlowDebugSession: DataFlowDebugSession; + sqlScript: SqlScript; + sparkJobDefinition: SparkJobDefinition; + notebook: Notebook; + workspace: Workspace; + sqlPools: SqlPools; + bigDataPools: BigDataPools; + integrationRuntimes: IntegrationRuntimes; + workspaceGitRepoManagement: WorkspaceGitRepoManagement; +} diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts new file mode 100644 index 000000000000..3c6f3a11f2e1 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts @@ -0,0 +1,63 @@ +import * as coreHttp from "@azure/core-http"; +import { ArtifactsClientOptionalParams } from "./models"; +import { 
lroPolicy } from "./lro"; + +const packageName = "@azure/synapse-artifacts"; +const packageVersion = "1.0.0"; + +export class ArtifactsClientContext extends coreHttp.ServiceClient { + endpoint: string; + apiVersion: string; + + /** + * Initializes a new instance of the ArtifactsClientContext class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: ArtifactsClientOptionalParams + ) { + if (credentials === undefined) { + throw new Error("'credentials' cannot be null"); + } + if (endpoint === undefined) { + throw new Error("'endpoint' cannot be null"); + } + + // Initializing default values for options + if (!options) { + options = {}; + } + + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + + const defaultPipelines = Array.isArray(options.requestPolicyFactories) + ? 
options.requestPolicyFactories + : (coreHttp.createPipelineFromOptions(options) + .requestPolicyFactories as coreHttp.RequestPolicyFactory[]); + + options = { + ...options, + requestPolicyFactories: [lroPolicy(), ...defaultPipelines] + }; + + super(credentials, options); + + this.requestContentType = "application/json; charset=utf-8"; + + this.baseUri = options.endpoint || "{endpoint}"; + + // Parameter assignments + this.endpoint = endpoint; + + // Assigning values to Constant parameters + this.apiVersion = options.apiVersion || "2019-06-01-preview"; + } +} diff --git a/sdk/synapse/synapse-artifacts/src/index.ts b/sdk/synapse/synapse-artifacts/src/index.ts new file mode 100644 index 000000000000..53ba3a9039ab --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/index.ts @@ -0,0 +1,4 @@ +/// +export * from "./models"; +export { ArtifactsClient } from "./artifactsClient"; +export { ArtifactsClientContext } from "./artifactsClientContext"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts new file mode 100644 index 000000000000..0b5d4232590f --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -0,0 +1,231 @@ +import { + LROStrategy, + BaseResult, + LROOperationStep, + LROResponseInfo, + FinalStateVia, + LROSYM +} from "./models"; +import { + OperationSpec, + OperationArguments, + OperationResponse +} from "@azure/core-http"; +import { terminalStates } from "./constants"; +import { SendOperationFn } from "."; + +export function createAzureAsyncOperationStrategy( + initialOperation: LROOperationStep, + sendOperationFn: SendOperationFn, + finalStateVia?: FinalStateVia +): LROStrategy { + const lroData = initialOperation.result._response[LROSYM]; + if (!lroData) { + throw new Error( + "Expected lroData to be defined for Azure-AsyncOperation strategy" + ); + } + + let currentOperation = initialOperation; + let lastKnownPollingUrl = 
+ lroData.azureAsyncOperation || lroData.operationLocation; + + return { + isTerminal: () => { + const currentResult = currentOperation.result._response[LROSYM]; + + if (!currentResult) { + throw new Error("Expected lroData to determine terminal status"); + } + + if (currentOperation === initialOperation) { + // Azure-AsyncOperations don't need to check for terminal state + // on originalOperation result, always need to poll + return false; + } + + const { status = "succeeded" } = currentResult; + return terminalStates.includes(status.toLowerCase()); + }, + sendFinalRequest: async () => { + if (!initialOperation.result._response[LROSYM]) { + throw new Error("Expected lroData to determine terminal status"); + } + + if (!currentOperation.result._response[LROSYM]) { + throw new Error("Expected lroData to determine terminal status"); + } + + const initialOperationResult = initialOperation.result._response[LROSYM]; + const currentOperationResult = currentOperation.result._response[LROSYM]; + + if ( + !shouldPerformFinalGet(initialOperationResult, currentOperationResult) + ) { + return currentOperation; + } + + if (initialOperationResult?.requestMethod === "PUT") { + currentOperation = await sendFinalGet( + initialOperation, + sendOperationFn + ); + + return currentOperation; + } + + if (initialOperationResult?.location) { + switch (finalStateVia) { + case "original-uri": + currentOperation = await sendFinalGet( + initialOperation, + sendOperationFn + ); + return currentOperation; + + case "azure-async-operation": + return currentOperation; + case "location": + default: + const location = + initialOperationResult.location || + currentOperationResult?.location; + + if (!location) { + throw new Error("Couldn't determine final GET URL from location"); + } + + return await sendFinalGet( + initialOperation, + sendOperationFn, + location + ); + } + } + + // All other cases return the last operation + return currentOperation; + }, + poll: async () => { + if 
(!lastKnownPollingUrl) { + throw new Error("Unable to determine polling url"); + } + + const pollingArgs = currentOperation.args; + // Make sure we don't send any body to the get request + const { requestBody, responses, ...restSpec } = currentOperation.spec; + + const pollingSpec: OperationSpec = { + ...restSpec, + responses: getCompositeMappers(responses), + httpMethod: "GET", + path: lastKnownPollingUrl + }; + + const result = await sendOperationFn(pollingArgs, pollingSpec); + + // Update latest polling url + lastKnownPollingUrl = + result._response[LROSYM]?.azureAsyncOperation || + result._response[LROSYM]?.operationLocation || + lastKnownPollingUrl; + + // Update lastOperation result + currentOperation = { + args: pollingArgs, + spec: pollingSpec, + result + }; + + return currentOperation; + } + }; +} + +/** + * Polling calls will always return a status object i.e. {"status": "success"} + * these intermediate responses are not described in the swagger so we need to + * pass custom mappers at runtime. 
+ * This function replaces all the existing mappers to be able to deserialize a status object + * @param responses Original set of responses defined in the operation + */ +function getCompositeMappers(responses: { + [responseCode: string]: OperationResponse; +}): { + [responseCode: string]: OperationResponse; +} { + return Object.keys(responses).reduce((acc, statusCode) => { + return { + ...acc, + [statusCode]: { + ...responses[statusCode], + bodyMapper: { + type: { + name: "Composite", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + } + } + } + } + } + }; + }, {} as { [responseCode: string]: OperationResponse }); +} + +function shouldPerformFinalGet( + initialResult?: LROResponseInfo, + currentResult?: LROResponseInfo +) { + const { status } = currentResult || {}; + const { requestMethod: initialRequestMethod, location } = initialResult || {}; + if (status && status.toLowerCase() !== "succeeded") { + return false; + } + + if (initialRequestMethod === "DELETE") { + return false; + } + + if (initialRequestMethod !== "PUT" && !location) { + return false; + } + + return true; +} + +async function sendFinalGet( + initialOperation: LROOperationStep, + sendOperationFn: SendOperationFn, + path?: string +): Promise> { + // Make sure we don't send any body to the get request + const { requestBody, ...restSpec } = initialOperation.spec; + const finalGetSpec: OperationSpec = { + ...restSpec, + httpMethod: "GET" + }; + + // Send final GET request to the Original URL + const spec = { + ...finalGetSpec, + ...(path && { path }) + }; + + let operationArgs: OperationArguments = initialOperation.args; + if (operationArgs.options) { + operationArgs.options.shouldDeserialize = true; + } + + const finalResult = await sendOperationFn(initialOperation.args, spec); + + return { + args: initialOperation.args, + spec, + result: finalResult + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts 
b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts new file mode 100644 index 000000000000..62ed188e691e --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -0,0 +1,54 @@ +import { LROStrategy, BaseResult, LROOperationStep, LROSYM } from "./models"; +import { OperationSpec } from "@azure/core-http"; +import { terminalStates } from "./constants"; +import { SendOperationFn } from "./lroPoller"; + +/** + * Creates a polling strategy based on BodyPolling which uses the provisioning state + * from the result to determine the current operation state + */ +export function createBodyPollingStrategy( + initialOperation: LROOperationStep, + sendOperation: SendOperationFn +): LROStrategy { + if (!initialOperation.result._response[LROSYM]) { + throw new Error("Expected lroData to be defined for BodyPolling strategy"); + } + + let currentOperation = initialOperation; + + return { + isTerminal: () => { + const currentResult = currentOperation.result._response[LROSYM]; + if (!currentResult) { + throw new Error("Expected lroData to determine terminal status"); + } + + const { provisioningState = "succeeded" } = currentResult; + // If provisioning state is missing, default to Success + + return terminalStates.includes(provisioningState.toLowerCase()); + }, + sendFinalRequest: () => { + // BodyPolling doesn't require a final get so return the lastOperation + return Promise.resolve(currentOperation); + }, + poll: async () => { + // When doing BodyPolling, we need to poll to the original url with a + // GET http method + const { requestBody, ...restSpec } = initialOperation.spec; + const pollingSpec: OperationSpec = { + // Make sure we don't send any body to the get request + ...restSpec, + httpMethod: "GET" + }; + + // Execute the polling operation + initialOperation.result = await sendOperation( + initialOperation.args, + pollingSpec + ); + return initialOperation; + } + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/constants.ts 
b/sdk/synapse/synapse-artifacts/src/lro/constants.ts new file mode 100644 index 000000000000..b0a9acd375b7 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/constants.ts @@ -0,0 +1 @@ +export const terminalStates = ["succeeded", "failed", "canceled", "cancelled"]; diff --git a/sdk/synapse/synapse-artifacts/src/lro/index.ts b/sdk/synapse/synapse-artifacts/src/lro/index.ts new file mode 100644 index 000000000000..a2a8e9c0e3a0 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/index.ts @@ -0,0 +1,15 @@ +export { shouldDeserializeLRO } from "./requestUtils"; +export { createBodyPollingStrategy } from "./bodyPollingStrategy"; +export { terminalStates } from "./constants"; +export { lroPolicy } from "./lroPolicy"; +export { LROPoller, LROPollerOptions, SendOperationFn } from "./lroPoller"; +export { + LROResponseInfo, + BaseResult, + LROOperationStep, + LROOperationState, + LROStrategy, + LROOperation +} from "./models"; +export { makeOperation } from "./operation"; +export * from "./locationStrategy"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts new file mode 100644 index 000000000000..cfcfa8efd0a7 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -0,0 +1,67 @@ +import { BaseResult, LROOperationStep, LROStrategy, LROSYM } from "./models"; +import { SendOperationFn } from "./lroPoller"; +import { OperationSpec } from "@azure/core-http"; + +export function createLocationStrategy( + initialOperation: LROOperationStep, + sendOperationFn: SendOperationFn +): LROStrategy { + const lroData = initialOperation.result._response[LROSYM]; + if (!lroData) { + throw new Error( + "Expected lroData to be defined for Azure-AsyncOperation strategy" + ); + } + + let currentOperation = initialOperation; + let lastKnownPollingUrl = lroData.location; + + return { + isTerminal: () => { + const currentResult = currentOperation.result._response[LROSYM]; + if 
(!currentResult) { + throw new Error("Expected lroData to determine terminal status"); + } + + if (currentOperation === initialOperation) { + return false; + } + + if (currentResult.statusCode === 202) { + return false; + } + + return true; + }, + sendFinalRequest: () => Promise.resolve(currentOperation), + poll: async () => { + if (!lastKnownPollingUrl) { + throw new Error("Unable to determine polling url"); + } + + const pollingArgs = currentOperation.args; + // Make sure we don't send any body to the get request + const { requestBody, ...restSpec } = currentOperation.spec; + const pollingSpec: OperationSpec = { + ...restSpec, + httpMethod: "GET", + path: lastKnownPollingUrl + }; + + const result = await sendOperationFn(pollingArgs, pollingSpec); + + // Update latest polling url + lastKnownPollingUrl = + result._response[LROSYM]?.location || lastKnownPollingUrl; + + // Update lastOperation result + currentOperation = { + args: pollingArgs, + spec: pollingSpec, + result + }; + + return currentOperation; + } + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts new file mode 100644 index 000000000000..e686401a5cf9 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -0,0 +1,36 @@ +import { + RequestPolicy, + RequestPolicyOptions, + BaseRequestPolicy, + HttpOperationResponse, + WebResource +} from "@azure/core-http"; +import { LROOperationResponse, LROSYM } from "./models"; +import { getLROData } from "./requestUtils"; + +export function lroPolicy() { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => { + return new LROPolicy(nextPolicy, options); + } + }; +} + +class LROPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) { + super(nextPolicy, options); + } + + public async sendRequest( + webResource: WebResource + ): Promise { + let result: LROOperationResponse = await 
this._nextPolicy.sendRequest( + webResource + ); + const _lroData = getLROData(result); + + result[LROSYM] = _lroData; + + return result; + } +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts new file mode 100644 index 000000000000..72d979bd46a9 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -0,0 +1,147 @@ +import { Poller } from "@azure/core-lro"; +import { + OperationSpec, + OperationArguments, + delay, + RestError +} from "@azure/core-http"; +import { + BaseResult, + LROOperationState, + LROOperationStep, + FinalStateVia, + LROSYM +} from "./models"; +import { makeOperation } from "./operation"; +import { createBodyPollingStrategy } from "./bodyPollingStrategy"; +import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; +import { createLocationStrategy } from "./locationStrategy"; +import { createPassthroughStrategy } from "./passthroughStrategy"; + +export type SendOperationFn = ( + args: OperationArguments, + spec: OperationSpec +) => Promise; + +export interface LROPollerOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * Arguments used to send the initial operation + */ + initialOperationArguments: OperationArguments; + /** + * Operation spec provided for the initial operation + */ + initialOperationSpec: OperationSpec; + /** + * Result from the initial operation + */ + initialOperationResult: TResult; + /** + * Function to execute an operation based on an operation spec and arguments + */ + sendOperation: SendOperationFn; + /** + * Optional information on where to poll. 
When not defined it defaults to "Location" + */ + finalStateVia?: FinalStateVia; +} + +export class LROPoller extends Poller< + LROOperationState, + TResult +> { + private intervalInMs: number; + + constructor({ + initialOperationArguments, + initialOperationResult, + initialOperationSpec, + sendOperation, + finalStateVia, + intervalInMs = 2000 + }: LROPollerOptions) { + const initialOperation = { + args: initialOperationArguments, + spec: initialOperationSpec, + result: initialOperationResult + }; + + const pollingStrategy = getPollingStrategy( + initialOperation, + sendOperation, + finalStateVia + ); + + const state: LROOperationState = { + // Initial operation will become the last operation + initialOperation, + lastOperation: initialOperation, + pollingStrategy, + finalStateVia + }; + + const operation = makeOperation(state); + super(operation); + + this.intervalInMs = intervalInMs; + } + + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay(): Promise { + return delay(this.intervalInMs); + } +} + +/** + * This function determines which strategy to use based on the response from + * the last operation executed, this last operation can be an initial operation + * or a polling operation. 
The 3 possible strategies are described below: + * + * A) Azure-AsyncOperation or Operation-Location + * B) Location + * C) BodyPolling (provisioningState) + * - This strategy is used when: + * - Response doesn't contain any of the following headers Location, Azure-AsyncOperation or Operation-Location + * - Last operation method is PUT or PATCH + */ +function getPollingStrategy( + initialOperation: LROOperationStep, + sendOperationFn: SendOperationFn, + finalStateVia?: FinalStateVia +) { + const lroData = initialOperation.result._response[LROSYM]; + + if (!lroData) { + const error = new RestError( + "Service response doesn't include the required LRO data to continue polling" + ); + error.statusCode = initialOperation.result._response.status; + error.response = initialOperation.result._response; + throw error; + } + + if (lroData.azureAsyncOperation || lroData.operationLocation) { + return createAzureAsyncOperationStrategy( + initialOperation, + sendOperationFn, + finalStateVia + ); + } + + if (lroData.location) { + return createLocationStrategy(initialOperation, sendOperationFn); + } + + if (["PUT", "PATCH"].includes(lroData.requestMethod || "")) { + return createBodyPollingStrategy(initialOperation, sendOperationFn); + } + + // Default strategy is just a passthrough returning the initial operation + return createPassthroughStrategy(initialOperation); +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts new file mode 100644 index 000000000000..ed0dd9132876 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -0,0 +1,67 @@ +import { + OperationArguments, + OperationSpec, + RestResponse, + HttpMethods, + HttpOperationResponse +} from "@azure/core-http"; +import { PollOperationState, PollOperation } from "@azure/core-lro"; +export const LROSYM = Symbol("LROData"); + +export type FinalStateVia = + | "azure-async-operation" + | "location" + | "original-uri"; + +export interface LROResponseInfo { + 
requestMethod: HttpMethods; + statusCode: number; + isInitialRequest?: boolean; + azureAsyncOperation?: string; + operationLocation?: string; + location?: string; + provisioningState?: string; + status?: string; +} + +/** + * Extended operation response for LROs + */ +export type LROOperationResponse = HttpOperationResponse & { + /** + * Symbol that contains LRO details + */ + [LROSYM]?: LROResponseInfo; +}; + +export interface BaseResult extends RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. + */ + _response: LROOperationResponse; +} + +export interface LROOperationStep { + args: OperationArguments; + spec: OperationSpec; + result: TResult; +} + +export interface LROOperationState + extends PollOperationState { + lastOperation: LROOperationStep; + initialOperation: LROOperationStep; + pollingStrategy: LROStrategy; + finalStateVia?: FinalStateVia; +} + +export interface LROStrategy { + isTerminal: () => boolean; + sendFinalRequest: () => Promise>; + poll: () => Promise>; +} + +export type LROOperation = PollOperation< + LROOperationState, + TResult +>; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts new file mode 100644 index 000000000000..9b37277e7b2a --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -0,0 +1,74 @@ +import { BaseResult, LROOperationState, LROOperation, LROSYM } from "./models"; + +/** + * Creates a copy of the operation from a given State + */ +export function makeOperation( + state: LROOperationState +): LROOperation { + return { + state: { ...state }, + update, + cancel, + toString: function(this: LROOperation) { + return JSON.stringify(this.state); + } + }; +} + +/** + * General update function for LROPoller, the general process is as follows + * 1. Check initial operation result to determine the strategy to use + * - Strategies: Location, Azure-AsyncOperation, Original Uri + * 2. 
Check if the operation result has a terminal state + - Terminal state will be determined by each strategy + 2.1 If it is terminal state, check if a final GET request is required, if so + send final GET request and return result from operation. If no final GET + is required, just return the result from operation. + - Determining what to call for final request is responsibility of each strategy + 2.2 If it is not terminal state, call the polling operation and go to step 1 + - Determining what to call for polling is responsibility of each strategy + - Strategies will always use the latest URI for polling if provided otherwise + the last known one + */ +async function update( + this: LROOperation +): Promise> { + const state = { ...this.state }; + + const { sendFinalRequest, poll, isTerminal } = state.pollingStrategy; + const currentResponse = state.lastOperation; + const currentLroData = currentResponse.result._response[LROSYM]; + + if (!currentLroData) { + throw new Error( + "Expected lroData to be defined for updating LRO operation" + ); + } + + if (state.result) { + state.isCompleted = true; + return makeOperation(state); + } + + // Check if last result is terminal + if (isTerminal()) { + state.lastOperation = await sendFinalRequest(); + state.result = state.lastOperation.result; + } else { + state.lastOperation = await poll(); + } + + // Return operation + return makeOperation(state); +} + +/** + * Swagger doesn't support defining a cancel operation, we'll just mark + * the operation state as cancelled + */ +async function cancel( + this: LROOperation +): Promise> { + return makeOperation({ ...this.state, isCancelled: true }); +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts new file mode 100644 index 000000000000..12e48123a60f --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts @@ -0,0 +1,22 @@ +import { LROStrategy, 
BaseResult, LROOperationStep } from "./models"; + +/** + * Creates a passthrough polling strategy which treats the initial operation + * result as terminal and never performs any additional polling + */ +export function createPassthroughStrategy( + initialOperation: LROOperationStep +): LROStrategy { + return { + isTerminal: () => { + return true; + }, + sendFinalRequest: () => { + // Passthrough doesn't require a final GET so return the initial operation + return Promise.resolve(initialOperation); + }, + poll: async () => { + throw new Error("Passthrough strategy should never poll"); + } + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts new file mode 100644 index 000000000000..e3289b95905b --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -0,0 +1,109 @@ +import { HttpOperationResponse } from "@azure/core-http"; +import { terminalStates } from "./constants"; +import { LROResponseInfo } from "./models"; + +/** + * We need to selectively deserialize our responses, only deserializing if we + * are in a final LRO response, not deserializing any polling non-terminal responses + */ +export function shouldDeserializeLRO(finalStateVia?: string) { + let initialOperationInfo: LROResponseInfo | undefined; + let isInitialRequest = true; + + return (response: HttpOperationResponse) => { + if (response.status < 200 || response.status >= 300) { + return true; + } + + if (!initialOperationInfo) { + initialOperationInfo = getLROData(response); + } else { + isInitialRequest = false; + } + + if ( + initialOperationInfo.azureAsyncOperation || + initialOperationInfo.operationLocation + ) { + return ( + !isInitialRequest && + isAsyncOperationFinalResponse( + response, + initialOperationInfo, + finalStateVia + ) + ); + } + + if (initialOperationInfo.location) { + return isLocationFinalResponse(response); + } + + if (initialOperationInfo.requestMethod === "PUT") { + return 
isBodyPollingFinalResponse(response); + } + + return true; + }; +} + +function isAsyncOperationFinalResponse( + response: HttpOperationResponse, + initialOperationInfo: LROResponseInfo, + finalStateVia?: string +): boolean { + const status: string = response.parsedBody?.status || "Succeeded"; + if (!terminalStates.includes(status.toLowerCase())) { + return false; + } + + if (initialOperationInfo.requestMethod === "DELETE") { + return true; + } + + if ( + initialOperationInfo.requestMethod === "PUT" && + finalStateVia && + finalStateVia.toLowerCase() === "azure-asyncoperation" + ) { + return true; + } + + if ( + initialOperationInfo.requestMethod !== "PUT" && + !initialOperationInfo.location + ) { + return true; + } + + return false; +} + +function isLocationFinalResponse(response: HttpOperationResponse): boolean { + return response.status !== 202; +} + +function isBodyPollingFinalResponse(response: HttpOperationResponse): boolean { + const provisioningState: string = + response.parsedBody?.properties?.provisioningState || "Succeeded"; + + if (terminalStates.includes(provisioningState.toLowerCase())) { + return true; + } + + return false; +} + +export function getLROData(result: HttpOperationResponse): LROResponseInfo { + const statusCode = result.status; + const { status, properties } = result.parsedBody || {}; + return { + statusCode, + azureAsyncOperation: result.headers.get("azure-asyncoperation"), + operationLocation: result.headers.get("operation-location"), + location: result.headers.get("location"), + requestMethod: result.request.method, + status, + provisioningState: properties?.provisioningState + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts new file mode 100644 index 000000000000..48df2ee3a27a --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -0,0 +1,15974 @@ +import * as coreHttp from "@azure/core-http"; +import { LROSYM, LROResponseInfo } from 
"../lro/models"; + +export type LinkedServiceUnion = + | AzureStorageLinkedService + | AzureBlobStorageLinkedService + | AzureTableStorageLinkedService + | AzureSqlDWLinkedService + | SqlServerLinkedService + | AzureSqlDatabaseLinkedService + | AzureSqlMILinkedService + | AzureBatchLinkedService + | AzureKeyVaultLinkedService + | CosmosDbLinkedService + | DynamicsLinkedService + | DynamicsCrmLinkedService + | CommonDataServiceForAppsLinkedService + | HDInsightLinkedService + | FileServerLinkedService + | AzureFileStorageLinkedService + | GoogleCloudStorageLinkedService + | OracleLinkedService + | AzureMySqlLinkedService + | MySqlLinkedService + | PostgreSqlLinkedService + | SybaseLinkedService + | Db2LinkedService + | TeradataLinkedService + | AzureMLLinkedService + | AzureMLServiceLinkedService + | OdbcLinkedService + | InformixLinkedService + | MicrosoftAccessLinkedService + | HdfsLinkedService + | ODataLinkedService + | WebLinkedService + | CassandraLinkedService + | MongoDbLinkedService + | MongoDbV2LinkedService + | CosmosDbMongoDbApiLinkedService + | AzureDataLakeStoreLinkedService + | AzureBlobFSLinkedService + | Office365LinkedService + | SalesforceLinkedService + | SalesforceServiceCloudLinkedService + | SapCloudForCustomerLinkedService + | SapEccLinkedService + | SapOpenHubLinkedService + | RestServiceLinkedService + | AmazonS3LinkedService + | AmazonRedshiftLinkedService + | CustomDataSourceLinkedService + | AzureSearchLinkedService + | HttpLinkedService + | FtpServerLinkedService + | SftpServerLinkedService + | SapBWLinkedService + | SapHanaLinkedService + | AmazonMWSLinkedService + | AzurePostgreSqlLinkedService + | ConcurLinkedService + | CouchbaseLinkedService + | DrillLinkedService + | EloquaLinkedService + | GoogleBigQueryLinkedService + | GreenplumLinkedService + | HBaseLinkedService + | HiveLinkedService + | HubspotLinkedService + | ImpalaLinkedService + | JiraLinkedService + | MagentoLinkedService + | MariaDBLinkedService + | 
AzureMariaDBLinkedService + | MarketoLinkedService + | PaypalLinkedService + | PhoenixLinkedService + | PrestoLinkedService + | QuickBooksLinkedService + | ServiceNowLinkedService + | ShopifyLinkedService + | SparkLinkedService + | SquareLinkedService + | XeroLinkedService + | ZohoLinkedService + | VerticaLinkedService + | NetezzaLinkedService + | SalesforceMarketingCloudLinkedService + | HDInsightOnDemandLinkedService + | AzureDataLakeAnalyticsLinkedService + | AzureDatabricksLinkedService + | ResponsysLinkedService + | DynamicsAXLinkedService + | OracleServiceCloudLinkedService + | GoogleAdWordsLinkedService + | SapTableLinkedService + | AzureDataExplorerLinkedService + | AzureFunctionLinkedService; +export type DatasetUnion = + | AvroDataset + | ParquetDataset + | DelimitedTextDataset + | JsonDataset + | OrcDataset + | BinaryDataset + | AzureTableDataset + | AzureSqlTableDataset + | AzureSqlMITableDataset + | AzureSqlDWTableDataset + | CassandraTableDataset + | CustomDataset + | CosmosDbSqlApiCollectionDataset + | DocumentDbCollectionDataset + | DynamicsEntityDataset + | DynamicsCrmEntityDataset + | CommonDataServiceForAppsEntityDataset + | Office365Dataset + | MongoDbCollectionDataset + | MongoDbV2CollectionDataset + | CosmosDbMongoDbApiCollectionDataset + | ODataResourceDataset + | OracleTableDataset + | TeradataTableDataset + | AzureMySqlTableDataset + | AmazonRedshiftTableDataset + | Db2TableDataset + | RelationalTableDataset + | InformixTableDataset + | OdbcTableDataset + | MySqlTableDataset + | PostgreSqlTableDataset + | MicrosoftAccessTableDataset + | SalesforceObjectDataset + | SalesforceServiceCloudObjectDataset + | SybaseTableDataset + | SapBwCubeDataset + | SapCloudForCustomerResourceDataset + | SapEccResourceDataset + | SapHanaTableDataset + | SapOpenHubTableDataset + | SqlServerTableDataset + | RestResourceDataset + | SapTableResourceDataset + | WebTableDataset + | AzureSearchIndexDataset + | AmazonMWSObjectDataset + | AzurePostgreSqlTableDataset + 
| ConcurObjectDataset + | CouchbaseTableDataset + | DrillTableDataset + | EloquaObjectDataset + | GoogleBigQueryObjectDataset + | GreenplumTableDataset + | HBaseObjectDataset + | HiveObjectDataset + | HubspotObjectDataset + | ImpalaObjectDataset + | JiraObjectDataset + | MagentoObjectDataset + | MariaDBTableDataset + | AzureMariaDBTableDataset + | MarketoObjectDataset + | PaypalObjectDataset + | PhoenixObjectDataset + | PrestoObjectDataset + | QuickBooksObjectDataset + | ServiceNowObjectDataset + | ShopifyObjectDataset + | SparkObjectDataset + | SquareObjectDataset + | XeroObjectDataset + | ZohoObjectDataset + | NetezzaTableDataset + | VerticaTableDataset + | SalesforceMarketingCloudObjectDataset + | ResponsysObjectDataset + | DynamicsAXResourceDataset + | OracleServiceCloudObjectDataset + | AzureDataExplorerTableDataset + | GoogleAdWordsObjectDataset; +export type ActivityUnion = + | ControlActivity + | ExecutionActivityUnion + | ExecutePipelineActivity + | IfConditionActivity + | SwitchActivity + | ForEachActivity + | WaitActivity + | UntilActivity + | ValidationActivity + | FilterActivity + | SetVariableActivity + | AppendVariableActivity + | WebHookActivity + | SynapseNotebookActivity + | SynapseSparkJobDefinitionActivity + | SqlPoolStoredProcedureActivity; +export type TriggerUnion = + | RerunTumblingWindowTrigger + | MultiplePipelineTriggerUnion + | TumblingWindowTrigger + | ChainingTrigger; +export type DataFlowUnion = MappingDataFlow; +export type IntegrationRuntimeUnion = + | ManagedIntegrationRuntime + | SelfHostedIntegrationRuntime; +export type SecretBaseUnion = SecureString | AzureKeyVaultSecretReference; +export type DatasetLocationUnion = + | AzureBlobStorageLocation + | AzureBlobFSLocation + | AzureDataLakeStoreLocation + | AmazonS3Location + | FileServerLocation + | AzureFileStorageLocation + | GoogleCloudStorageLocation + | FtpServerLocation + | SftpLocation + | HttpServerLocation + | HdfsLocation; +export type DatasetStorageFormatUnion = + | 
TextFormat + | JsonFormat + | AvroFormat + | OrcFormat + | ParquetFormat; +export type DatasetCompressionUnion = + | DatasetBZip2Compression + | DatasetGZipCompression + | DatasetDeflateCompression + | DatasetZipDeflateCompression; +export type WebLinkedServiceTypePropertiesUnion = + | WebAnonymousAuthentication + | WebBasicAuthentication + | WebClientCertificateAuthentication; +export type StoreReadSettingsUnion = + | AzureBlobStorageReadSettings + | AzureBlobFSReadSettings + | AzureDataLakeStoreReadSettings + | AmazonS3ReadSettings + | FileServerReadSettings + | AzureFileStorageReadSettings + | GoogleCloudStorageReadSettings + | FtpReadSettings + | SftpReadSettings + | HttpReadSettings + | HdfsReadSettings; +export type StoreWriteSettingsUnion = + | SftpWriteSettings + | AzureBlobStorageWriteSettings + | AzureBlobFSWriteSettings + | AzureDataLakeStoreWriteSettings + | FileServerWriteSettings; +export type FormatReadSettingsUnion = DelimitedTextReadSettings; +export type FormatWriteSettingsUnion = + | AvroWriteSettings + | DelimitedTextWriteSettings + | JsonWriteSettings; +export type CopySourceUnion = + | AvroSource + | ParquetSource + | DelimitedTextSource + | JsonSource + | OrcSource + | BinarySource + | TabularSourceUnion + | BlobSource + | DocumentDbCollectionSource + | CosmosDbSqlApiSource + | DynamicsSource + | DynamicsCrmSource + | CommonDataServiceForAppsSource + | RelationalSource + | MicrosoftAccessSource + | ODataSource + | SalesforceServiceCloudSource + | RestSource + | FileSystemSource + | HdfsSource + | AzureDataExplorerSource + | OracleSource + | WebSource + | MongoDbSource + | MongoDbV2Source + | CosmosDbMongoDbApiSource + | Office365Source + | AzureDataLakeStoreSource + | AzureBlobFSSource + | HttpSource; +export type CopySinkUnion = + | DelimitedTextSink + | JsonSink + | OrcSink + | AzurePostgreSqlSink + | AzureMySqlSink + | SapCloudForCustomerSink + | AzureQueueSink + | AzureTableSink + | AvroSink + | ParquetSink + | BinarySink + | BlobSink + | 
FileSystemSink + | DocumentDbCollectionSink + | CosmosDbSqlApiSink + | SqlSink + | SqlServerSink + | AzureSqlSink + | SqlMISink + | SqlDWSink + | OracleSink + | AzureDataLakeStoreSink + | AzureBlobFSSink + | AzureSearchIndexSink + | OdbcSink + | InformixSink + | MicrosoftAccessSink + | DynamicsSink + | DynamicsCrmSink + | CommonDataServiceForAppsSink + | AzureDataExplorerSink + | SalesforceSink + | SalesforceServiceCloudSink + | CosmosDbMongoDbApiSink; +export type CopyTranslatorUnion = TabularTranslator; +export type DependencyReferenceUnion = + | TriggerDependencyReferenceUnion + | SelfDependencyTumblingWindowTriggerReference; +export type LinkedIntegrationRuntimeTypeUnion = + | LinkedIntegrationRuntimeKeyAuthorization + | LinkedIntegrationRuntimeRbacAuthorization; +export type ExecutionActivityUnion = + | CopyActivity + | HDInsightHiveActivity + | HDInsightPigActivity + | HDInsightMapReduceActivity + | HDInsightStreamingActivity + | HDInsightSparkActivity + | ExecuteSsisPackageActivity + | CustomActivity + | SqlServerStoredProcedureActivity + | DeleteActivity + | AzureDataExplorerCommandActivity + | LookupActivity + | WebActivity + | GetMetadataActivity + | AzureMLBatchExecutionActivity + | AzureMLUpdateResourceActivity + | AzureMLExecutePipelineActivity + | DataLakeAnalyticsUsqlActivity + | DatabricksNotebookActivity + | DatabricksSparkJarActivity + | DatabricksSparkPythonActivity + | AzureFunctionActivity + | ExecuteDataFlowActivity; +export type MultiplePipelineTriggerUnion = + | ScheduleTrigger + | BlobTrigger + | BlobEventsTrigger; +export type TabularSourceUnion = + | AzureTableSource + | InformixSource + | Db2Source + | OdbcSource + | MySqlSource + | PostgreSqlSource + | SybaseSource + | SapBwSource + | SalesforceSource + | SapCloudForCustomerSource + | SapEccSource + | SapHanaSource + | SapOpenHubSource + | SapTableSource + | SqlSource + | SqlServerSource + | AzureSqlSource + | SqlMISource + | SqlDWSource + | AzureMySqlSource + | TeradataSource + | 
CassandraSource + | AmazonMWSSource + | AzurePostgreSqlSource + | ConcurSource + | CouchbaseSource + | DrillSource + | EloquaSource + | GoogleBigQuerySource + | GreenplumSource + | HBaseSource + | HiveSource + | HubspotSource + | ImpalaSource + | JiraSource + | MagentoSource + | MariaDBSource + | AzureMariaDBSource + | MarketoSource + | PaypalSource + | PhoenixSource + | PrestoSource + | QuickBooksSource + | ServiceNowSource + | ShopifySource + | SparkSource + | SquareSource + | XeroSource + | ZohoSource + | NetezzaSource + | VerticaSource + | SalesforceMarketingCloudSource + | ResponsysSource + | DynamicsAXSource + | OracleServiceCloudSource + | GoogleAdWordsSource + | AmazonRedshiftSource; +export type TriggerDependencyReferenceUnion = TumblingWindowTriggerDependencyReference; + +/** + * A list of linked service resources. + */ +export interface LinkedServiceListResponse { + /** + * List of linked services. + */ + value: LinkedServiceResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * The Azure Synapse nested object which contains the information and credential which can be used to connect with related store or compute resource. 
+ */ +export interface LinkedService { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "AzureStorage" + | "AzureBlobStorage" + | "AzureTableStorage" + | "AzureSqlDW" + | "SqlServer" + | "AzureSqlDatabase" + | "AzureSqlMI" + | "AzureBatch" + | "AzureKeyVault" + | "CosmosDb" + | "Dynamics" + | "DynamicsCrm" + | "CommonDataServiceForApps" + | "HDInsight" + | "FileServer" + | "AzureFileStorage" + | "GoogleCloudStorage" + | "Oracle" + | "AzureMySql" + | "MySql" + | "PostgreSql" + | "Sybase" + | "Db2" + | "Teradata" + | "AzureML" + | "AzureMLService" + | "Odbc" + | "Informix" + | "MicrosoftAccess" + | "Hdfs" + | "OData" + | "Web" + | "Cassandra" + | "MongoDb" + | "MongoDbV2" + | "CosmosDbMongoDbApi" + | "AzureDataLakeStore" + | "AzureBlobFS" + | "Office365" + | "Salesforce" + | "SalesforceServiceCloud" + | "SapCloudForCustomer" + | "SapEcc" + | "SapOpenHub" + | "RestService" + | "AmazonS3" + | "AmazonRedshift" + | "CustomDataSource" + | "AzureSearch" + | "HttpServer" + | "FtpServer" + | "Sftp" + | "SapBW" + | "SapHana" + | "AmazonMWS" + | "AzurePostgreSql" + | "Concur" + | "Couchbase" + | "Drill" + | "Eloqua" + | "GoogleBigQuery" + | "Greenplum" + | "HBase" + | "Hive" + | "Hubspot" + | "Impala" + | "Jira" + | "Magento" + | "MariaDB" + | "AzureMariaDB" + | "Marketo" + | "Paypal" + | "Phoenix" + | "Presto" + | "QuickBooks" + | "ServiceNow" + | "Shopify" + | "Spark" + | "Square" + | "Xero" + | "Zoho" + | "Vertica" + | "Netezza" + | "SalesforceMarketingCloud" + | "HDInsightOnDemand" + | "AzureDataLakeAnalytics" + | "AzureDatabricks" + | "Responsys" + | "DynamicsAX" + | "OracleServiceCloud" + | "GoogleAdWords" + | "SapTable" + | "AzureDataExplorer" + | "AzureFunction"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The integration runtime reference. 
+ */ + connectVia?: IntegrationRuntimeReference; + /** + * Linked service description. + */ + description?: string; + /** + * Parameters for linked service. + */ + parameters?: { [propertyName: string]: ParameterSpecification }; + /** + * List of tags that can be used for describing the linked service. + */ + annotations?: any[]; +} + +/** + * Integration runtime reference type. + */ +export interface IntegrationRuntimeReference { + /** + * Type of integration runtime. + */ + type: IntegrationRuntimeReferenceType; + /** + * Reference integration runtime name. + */ + referenceName: string; + /** + * Arguments for integration runtime. + */ + parameters?: { [propertyName: string]: any }; +} + +/** + * Definition of a single parameter for an entity. + */ +export interface ParameterSpecification { + /** + * Parameter type. + */ + type: ParameterType; + /** + * Default value of parameter. + */ + defaultValue?: any; +} + +/** + * Common fields that are returned in the response for all Azure Resource Manager resources + */ +export interface Resource { + /** + * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; + /** + * The name of the resource + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly name?: string; + /** + * The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; +} + +/** + * The object that defines the structure of an Azure Synapse error response. + */ +export interface CloudError { + /** + * Error code. + */ + code: string; + /** + * Error message. 
+ */ + message: string; + /** + * Property name/path in request associated with error. + */ + target?: string; + /** + * Array with additional error details. + */ + details?: CloudError[]; +} + +/** + * Request body structure for rename artifact. + */ +export interface ArtifactRenameRequest { + /** + * New name of the artifact. + */ + newName?: string; +} + +/** + * A list of dataset resources. + */ +export interface DatasetListResponse { + /** + * List of datasets. + */ + value: DatasetResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. + */ +export interface Dataset { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "Avro" + | "Parquet" + | "DelimitedText" + | "Json" + | "Orc" + | "Binary" + | "AzureTable" + | "AzureSqlTable" + | "AzureSqlMITable" + | "AzureSqlDWTable" + | "CassandraTable" + | "CustomDataset" + | "CosmosDbSqlApiCollection" + | "DocumentDbCollection" + | "DynamicsEntity" + | "DynamicsCrmEntity" + | "CommonDataServiceForAppsEntity" + | "Office365Table" + | "MongoDbCollection" + | "MongoDbV2Collection" + | "CosmosDbMongoDbApiCollection" + | "ODataResource" + | "OracleTable" + | "TeradataTable" + | "AzureMySqlTable" + | "AmazonRedshiftTable" + | "Db2Table" + | "RelationalTable" + | "InformixTable" + | "OdbcTable" + | "MySqlTable" + | "PostgreSqlTable" + | "MicrosoftAccessTable" + | "SalesforceObject" + | "SalesforceServiceCloudObject" + | "SybaseTable" + | "SapBwCube" + | "SapCloudForCustomerResource" + | "SapEccResource" + | "SapHanaTable" + | "SapOpenHubTable" + | "SqlServerTable" + | "RestResource" + | "SapTableResource" + | "WebTable" + | "AzureSearchIndex" + | "AmazonMWSObject" + | "AzurePostgreSqlTable" + | "ConcurObject" + | "CouchbaseTable" + | "DrillTable" + | 
"EloquaObject" + | "GoogleBigQueryObject" + | "GreenplumTable" + | "HBaseObject" + | "HiveObject" + | "HubspotObject" + | "ImpalaObject" + | "JiraObject" + | "MagentoObject" + | "MariaDBTable" + | "AzureMariaDBTable" + | "MarketoObject" + | "PaypalObject" + | "PhoenixObject" + | "PrestoObject" + | "QuickBooksObject" + | "ServiceNowObject" + | "ShopifyObject" + | "SparkObject" + | "SquareObject" + | "XeroObject" + | "ZohoObject" + | "NetezzaTable" + | "VerticaTable" + | "SalesforceMarketingCloudObject" + | "ResponsysObject" + | "DynamicsAXResource" + | "OracleServiceCloudObject" + | "AzureDataExplorerTable" + | "GoogleAdWordsObject"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Dataset description. + */ + description?: string; + /** + * Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. + */ + structure?: any; + /** + * Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. + */ + schema?: any; + /** + * Linked service reference. + */ + linkedServiceName: LinkedServiceReference; + /** + * Parameters for dataset. + */ + parameters?: { [propertyName: string]: ParameterSpecification }; + /** + * List of tags that can be used for describing the Dataset. + */ + annotations?: any[]; + /** + * The folder that this Dataset is in. If not specified, Dataset will appear at the root level. + */ + folder?: DatasetFolder; +} + +/** + * Linked service reference type. + */ +export interface LinkedServiceReference { + /** + * Linked service reference type. + */ + type: Type; + /** + * Reference LinkedService name. + */ + referenceName: string; + /** + * Arguments for LinkedService. + */ + parameters?: { [propertyName: string]: any }; +} + +/** + * The folder that this Dataset is in. 
If not specified, Dataset will appear at the root level. + */ +export interface DatasetFolder { + /** + * The name of the folder that this Dataset is in. + */ + name?: string; +} + +/** + * A list of pipeline resources. + */ +export interface PipelineListResponse { + /** + * List of pipelines. + */ + value: PipelineResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * A pipeline activity. + */ +export interface Activity { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "Container" + | "Execution" + | "Copy" + | "HDInsightHive" + | "HDInsightPig" + | "HDInsightMapReduce" + | "HDInsightStreaming" + | "HDInsightSpark" + | "ExecuteSSISPackage" + | "Custom" + | "SqlServerStoredProcedure" + | "ExecutePipeline" + | "Delete" + | "AzureDataExplorerCommand" + | "Lookup" + | "WebActivity" + | "GetMetadata" + | "IfCondition" + | "Switch" + | "ForEach" + | "AzureMLBatchExecution" + | "AzureMLUpdateResource" + | "AzureMLExecutePipeline" + | "DataLakeAnalyticsU-SQL" + | "Wait" + | "Until" + | "Validation" + | "Filter" + | "DatabricksNotebook" + | "DatabricksSparkJar" + | "DatabricksSparkPython" + | "SetVariable" + | "AppendVariable" + | "AzureFunctionActivity" + | "WebHook" + | "ExecuteDataFlow" + | "SynapseNotebook" + | "SparkJob" + | "SqlPoolStoredProcedure"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Activity name. + */ + name: string; + /** + * Activity description. + */ + description?: string; + /** + * Activity depends on condition. + */ + dependsOn?: ActivityDependency[]; + /** + * Activity user properties. + */ + userProperties?: UserProperty[]; +} + +/** + * Activity dependency information. + */ +export interface ActivityDependency { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. 
+ */ + [property: string]: any; + /** + * Activity name. + */ + activity: string; + /** + * Match-Condition for the dependency. + */ + dependencyConditions: DependencyCondition[]; +} + +/** + * User property. + */ +export interface UserProperty { + /** + * User property name. + */ + name: string; + /** + * User property value. Type: string (or Expression with resultType string). + */ + value: any; +} + +/** + * Definition of a single variable for a Pipeline. + */ +export interface VariableSpecification { + /** + * Variable type. + */ + type: VariableType; + /** + * Default value of variable. + */ + defaultValue?: any; +} + +/** + * The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. + */ +export interface PipelineFolder { + /** + * The name of the folder that this Pipeline is in. + */ + name?: string; +} + +/** + * Response body with a run identifier. + */ +export interface CreateRunResponse { + /** + * Identifier of a run. + */ + runId: string; +} + +/** + * Query parameters for listing runs. + */ +export interface RunFilterParameters { + /** + * The continuation token for getting the next page of results. Null for first page. + */ + continuationToken?: string; + /** + * The time at or after which the run event was updated in 'ISO 8601' format. + */ + lastUpdatedAfter: Date; + /** + * The time at or before which the run event was updated in 'ISO 8601' format. + */ + lastUpdatedBefore: Date; + /** + * List of filters. + */ + filters?: RunQueryFilter[]; + /** + * List of OrderBy option. + */ + orderBy?: RunQueryOrderBy[]; +} + +/** + * Query filter option for listing runs. + */ +export interface RunQueryFilter { + /** + * Parameter name to be used for filter. The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. 
+ */ + operand: RunQueryFilterOperand; + /** + * Operator to be used for filter. + */ + operator: RunQueryFilterOperator; + /** + * List of filter values. + */ + values: string[]; +} + +/** + * An object to provide order by options for listing runs. + */ +export interface RunQueryOrderBy { + /** + * Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. + */ + orderBy: RunQueryOrderByField; + /** + * Sorting order of the parameter. + */ + order: RunQueryOrder; +} + +/** + * A list pipeline runs. + */ +export interface PipelineRunsQueryResponse { + /** + * List of pipeline runs. + */ + value: PipelineRun[]; + /** + * The continuation token for getting the next page of results, if any remaining results exist, null otherwise. + */ + continuationToken?: string; +} + +/** + * Information about a pipeline run. + */ +export interface PipelineRun { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Identifier of a run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly runId?: string; + /** + * Identifier that correlates all the recovery runs of a pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly runGroupId?: string; + /** + * Indicates if the recovered pipeline run is the latest in its group. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly isLatest?: boolean; + /** + * The pipeline name. + * NOTE: This property will not be serialized. It can only be populated by the server. 
+ */ + readonly pipelineName?: string; + /** + * The full or partial list of parameter name, value pair used in the pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly parameters?: { [propertyName: string]: string }; + /** + * Entity that started the pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly invokedBy?: PipelineRunInvokedBy; + /** + * The last updated timestamp for the pipeline run event in ISO8601 format. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly lastUpdated?: Date; + /** + * The start time of a pipeline run in ISO8601 format. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly runStart?: Date; + /** + * The end time of a pipeline run in ISO8601 format. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly runEnd?: Date; + /** + * The duration of a pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly durationInMs?: number; + /** + * The status of a pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly status?: string; + /** + * The message from a pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly message?: string; +} + +/** + * Provides entity name and id that started the pipeline run. + */ +export interface PipelineRunInvokedBy { + /** + * Name of the entity that started the pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly name?: string; + /** + * The ID of the entity that started the run. + * NOTE: This property will not be serialized. It can only be populated by the server. 
+ */ + readonly id?: string; + /** + * The type of the entity that started the run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly invokedByType?: string; +} + +/** + * A list activity runs. + */ +export interface ActivityRunsQueryResponse { + /** + * List of activity runs. + */ + value: ActivityRun[]; + /** + * The continuation token for getting the next page of results, if any remaining results exist, null otherwise. + */ + continuationToken?: string; +} + +/** + * Information about an activity run in a pipeline. + */ +export interface ActivityRun { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The name of the pipeline. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly pipelineName?: string; + /** + * The id of the pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly pipelineRunId?: string; + /** + * The name of the activity. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly activityName?: string; + /** + * The type of the activity. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly activityType?: string; + /** + * The id of the activity run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly activityRunId?: string; + /** + * The name of the compute linked service. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly linkedServiceName?: string; + /** + * The status of the activity run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly status?: string; + /** + * The start time of the activity run in 'ISO 8601' format. 
+ * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly activityRunStart?: Date; + /** + * The end time of the activity run in 'ISO 8601' format. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly activityRunEnd?: Date; + /** + * The duration of the activity run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly durationInMs?: number; + /** + * The input for the activity. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly input?: any; + /** + * The output for the activity. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly output?: any; + /** + * The error if any from the activity run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly error?: any; +} + +/** + * A list of trigger resources. + */ +export interface TriggerListResponse { + /** + * List of triggers. + */ + value: TriggerResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * Azure Synapse nested object which contains information about creating pipeline run + */ +export interface Trigger { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "RerunTumblingWindowTrigger" + | "MultiplePipelineTrigger" + | "ScheduleTrigger" + | "BlobTrigger" + | "BlobEventsTrigger" + | "TumblingWindowTrigger" + | "ChainingTrigger"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Trigger description. + */ + description?: string; + /** + * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger. + * NOTE: This property will not be serialized. 
It can only be populated by the server. + */ + readonly runtimeState?: TriggerRuntimeState; + /** + * List of tags that can be used for describing the trigger. + */ + annotations?: any[]; +} + +/** + * Defines the response of a trigger subscription operation. + */ +export interface TriggerSubscriptionOperationStatus { + /** + * Trigger name. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerName?: string; + /** + * Event Subscription Status. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly status?: EventSubscriptionStatus; +} + +/** + * A list of trigger runs. + */ +export interface TriggerRunsQueryResponse { + /** + * List of trigger runs. + */ + value: TriggerRun[]; + /** + * The continuation token for getting the next page of results, if any remaining results exist, null otherwise. + */ + continuationToken?: string; +} + +/** + * Trigger runs. + */ +export interface TriggerRun { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Trigger run id. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerRunId?: string; + /** + * Trigger name. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerName?: string; + /** + * Trigger type. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerType?: string; + /** + * Trigger run start time. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggerRunTimestamp?: Date; + /** + * Trigger run status. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly status?: TriggerRunStatus; + /** + * Trigger error message. + * NOTE: This property will not be serialized. 
It can only be populated by the server. + */ + readonly message?: string; + /** + * List of property name and value related to trigger run. Name, value pair depends on type of trigger. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly properties?: { [propertyName: string]: string }; + /** + * List of pipeline name and run Id triggered by the trigger run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly triggeredPipelines?: { [propertyName: string]: string }; +} + +/** + * Azure Synapse nested object which contains a flow with data movements and transformations. + */ +export interface DataFlow { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MappingDataFlow"; + /** + * The description of the data flow. + */ + description?: string; + /** + * List of tags that can be used for describing the data flow. + */ + annotations?: any[]; + /** + * The folder that this data flow is in. If not specified, Data flow will appear at the root level. + */ + folder?: DataFlowFolder; +} + +/** + * The folder that this data flow is in. If not specified, Data flow will appear at the root level. + */ +export interface DataFlowFolder { + /** + * The name of the folder that this data flow is in. + */ + name?: string; +} + +/** + * A list of data flow resources. + */ +export interface DataFlowListResponse { + /** + * List of data flows. + */ + value: DataFlowResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * Request body structure for creating data flow debug session. + */ +export interface CreateDataFlowDebugSessionRequest { + /** + * The name of the data flow. + */ + dataFlowName?: string; + /** + * The ID of existing Databricks cluster. + */ + existingClusterId?: string; + /** + * Timeout setting for Databricks cluster. 
+ */ + clusterTimeout?: number; + /** + * The name of new Databricks cluster. + */ + newClusterName?: string; + /** + * The type of new Databricks cluster. + */ + newClusterNodeType?: string; + /** + * Data bricks linked service. + */ + dataBricksLinkedService?: LinkedServiceResource; +} + +/** + * Response body structure for creating data flow debug session. + */ +export interface CreateDataFlowDebugSessionResponse { + /** + * The ID of data flow debug session. + */ + sessionId?: string; +} + +/** + * A list of active debug sessions. + */ +export interface QueryDataFlowDebugSessionsResponse { + /** + * Array with all active debug sessions. + */ + value?: DataFlowDebugSessionInfo[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * Data flow debug session info. + */ +export interface DataFlowDebugSessionInfo { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The name of the data flow. + */ + dataFlowName?: string; + /** + * Compute type of the cluster. + */ + computeType?: string; + /** + * Core count of the cluster. + */ + coreCount?: number; + /** + * Node count of the cluster. (deprecated property) + */ + nodeCount?: number; + /** + * Attached integration runtime name of data flow debug session. + */ + integrationRuntimeName?: string; + /** + * The ID of data flow debug session. + */ + sessionId?: string; + /** + * Start time of data flow debug session. + */ + startTime?: string; + /** + * Compute type of the cluster. + */ + timeToLiveInMinutes?: number; + /** + * Last activity time of data flow debug session. + */ + lastActivityTime?: string; +} + +/** + * Request body structure for starting data flow debug session. + */ +export interface DataFlowDebugPackage { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. 
+ */ + [property: string]: any; + /** + * The ID of data flow debug session. + */ + sessionId?: string; + /** + * Data flow instance. + */ + dataFlow?: DataFlowDebugResource; + /** + * List of datasets. + */ + datasets?: DatasetDebugResource[]; + /** + * List of linked services. + */ + linkedServices?: LinkedServiceDebugResource[]; + /** + * Staging info for debug session. + */ + staging?: DataFlowStagingInfo; + /** + * Data flow debug settings. + */ + debugSettings?: DataFlowDebugPackageDebugSettings; +} + +/** + * Azure Synapse nested debug resource. + */ +export interface SubResourceDebugResource { + /** + * The resource name. + */ + name?: string; +} + +/** + * Staging info for execute data flow activity. + */ +export interface DataFlowStagingInfo { + /** + * Staging linked service reference. + */ + linkedService?: LinkedServiceReference; + /** + * Folder path for staging blob. + */ + folderPath?: string; +} + +/** + * Data flow debug settings. + */ +export interface DataFlowDebugPackageDebugSettings { + /** + * Source setting for data flow debug. + */ + sourceSettings?: DataFlowSourceSetting[]; + /** + * Data flow parameters. + */ + parameters?: { [propertyName: string]: any }; + /** + * Parameters for dataset. + */ + datasetParameters?: any; +} + +/** + * Definition of data flow source setting for debug. + */ +export interface DataFlowSourceSetting { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The data flow source name. + */ + sourceName?: string; + /** + * Defines the row limit of data flow source in debug. + */ + rowLimit?: number; +} + +/** + * Response body structure for starting data flow debug session. + */ +export interface AddDataFlowToDebugSessionResponse { + /** + * The ID of data flow debug job version. + */ + jobVersion?: string; +} + +/** + * Request body structure for deleting data flow debug session. 
+ */ +export interface DeleteDataFlowDebugSessionRequest { + /** + * The ID of data flow debug session. + */ + sessionId?: string; + /** + * The data flow which contains the debug session. + */ + dataFlowName?: string; +} + +/** + * Request body structure for data flow expression preview. + */ +export interface DataFlowDebugCommandRequest { + /** + * The ID of data flow debug session. + */ + sessionId: string; + /** + * The data flow which contains the debug session. + */ + dataFlowName?: string; + /** + * The command name. + */ + commandName?: string; + /** + * The command payload object. + */ + commandPayload: any; +} + +/** + * Response body structure of data flow result for data preview, statistics or expression preview. + */ +export interface DataFlowDebugCommandResponse { + /** + * The run status of data preview, statistics or expression preview. + */ + status?: string; + /** + * The result data of data preview, statistics or expression preview. + */ + data?: string; +} + +/** + * A list of sql scripts resources. + */ +export interface SqlScriptsListResponse { + /** + * List of sql scripts. + */ + value: SqlScriptResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * Sql Script resource type. + */ +export interface SqlScriptResource { + /** + * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; + /** + * The name of the resource + */ + name: string; + /** + * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * Resource Etag. 
+ * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly etag?: string; + /** + * Properties of sql script. + */ + properties: SqlScript; +} + +/** + * SQL script. + */ +export interface SqlScript { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The description of the SQL script. + */ + description?: string; + /** + * The type of the SQL script. + */ + type?: SqlScriptType; + /** + * The content of the SQL script. + */ + content: SqlScriptContent; +} + +/** + * The content of the SQL script. + */ +export interface SqlScriptContent { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * SQL query to execute. + */ + query: string; + /** + * The connection used to execute the SQL script. + */ + currentConnection: SqlConnection; + /** + * The metadata of the SQL script. + */ + metadata?: SqlScriptMetadata; +} + +/** + * The connection used to execute the SQL script. + */ +export interface SqlConnection { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The type of the connection. + */ + type: SqlConnectionType; + /** + * The identifier of the connection. + */ + name: string; +} + +/** + * The metadata of the SQL script. + */ +export interface SqlScriptMetadata { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The language of the SQL script. + */ + language?: string; +} + +/** + * A list of spark job definitions resources. + */ +export interface SparkJobDefinitionsListResponse { + /** + * List of spark job definitions. + */ + value: SparkJobDefinitionResource[]; + /** + * The link to the next page of results, if any remaining results exist. 
+ */ + nextLink?: string; +} + +/** + * Spark job definition. + */ +export interface SparkJobDefinition { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The description of the Spark job definition. + */ + description?: string; + /** + * Big data pool reference. + */ + targetBigDataPool: BigDataPoolReference; + /** + * The required Spark version of the application. + */ + requiredSparkVersion?: string; + /** + * The language of the Spark application. + */ + language?: string; + /** + * The properties of the Spark job. + */ + jobProperties: SparkJobProperties; +} + +/** + * Big data pool reference. + */ +export interface BigDataPoolReference { + /** + * Big data pool reference type. + */ + type: BigDataPoolReferenceType; + /** + * Reference big data pool name. + */ + referenceName: string; +} + +/** + * The properties of the Spark job. + */ +export interface SparkJobProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The name of the job. + */ + name?: string; + /** + * File containing the application to execute. + */ + file: string; + /** + * Main class for Java/Scala application. + */ + className?: string; + /** + * Spark configuration properties. + */ + conf?: any; + /** + * Command line arguments for the application. + */ + args?: string[]; + /** + * Jars to be used in this job. + */ + jars?: string[]; + /** + * files to be used in this job. + */ + files?: string[]; + /** + * Archives to be used in this job. + */ + archives?: string[]; + /** + * Amount of memory to use for the driver process. + */ + driverMemory: string; + /** + * Number of cores to use for the driver. + */ + driverCores: number; + /** + * Amount of memory to use per executor process. + */ + executorMemory: string; + /** + * Number of cores to use for each executor. 
+ */ + executorCores: number; + /** + * Number of executors to launch for this job. + */ + numExecutors: number; +} + +export interface SparkBatchJob { + livyInfo?: SparkBatchJobState; + /** + * The batch name. + */ + name?: string; + /** + * The workspace name. + */ + workspaceName?: string; + /** + * The Spark pool name. + */ + sparkPoolName?: string; + /** + * The submitter name. + */ + submitterName?: string; + /** + * The submitter identifier. + */ + submitterId?: string; + /** + * The artifact identifier. + */ + artifactId?: string; + /** + * The job type. + */ + jobType?: SparkJobType; + /** + * The Spark batch job result. + */ + result?: SparkBatchJobResultType; + /** + * The scheduler information. + */ + scheduler?: SparkScheduler; + /** + * The plugin information. + */ + plugin?: SparkServicePlugin; + /** + * The error information. + */ + errors?: SparkServiceError[]; + /** + * The tags. + */ + tags?: { [propertyName: string]: string }; + /** + * The session Id. + */ + id: number; + /** + * The application id of this session + */ + appId?: string | null; + /** + * The detailed application info. + */ + appInfo?: { [propertyName: string]: string } | null; + /** + * The batch state + */ + state?: string; + /** + * The log lines. + */ + logLines?: string[] | null; +} + +export interface SparkBatchJobState { + /** + * the time that at which "not_started" livy state was first seen. + */ + notStartedAt?: Date | null; + /** + * the time that at which "starting" livy state was first seen. + */ + startingAt?: Date | null; + /** + * the time that at which "running" livy state was first seen. + */ + runningAt?: Date | null; + /** + * time that at which "dead" livy state was first seen. + */ + deadAt?: Date | null; + /** + * the time that at which "success" livy state was first seen. + */ + successAt?: Date | null; + /** + * the time that at which "killed" livy state was first seen. 
+ */ + terminatedAt?: Date | null; + /** + * the time that at which "recovering" livy state was first seen. + */ + recoveringAt?: Date | null; + /** + * the Spark job state. + */ + currentState?: string; + jobCreationRequest?: SparkRequest; +} + +export interface SparkRequest { + name?: string; + file?: string; + className?: string; + arguments?: string[]; + jars?: string[]; + pythonFiles?: string[]; + files?: string[]; + archives?: string[]; + /** + * Dictionary of + */ + configuration?: { [propertyName: string]: string }; + driverMemory?: string; + driverCores?: number; + executorMemory?: string; + executorCores?: number; + executorCount?: number; +} + +export interface SparkScheduler { + submittedAt?: Date | null; + scheduledAt?: Date | null; + endedAt?: Date | null; + cancellationRequestedAt?: Date; + currentState?: SchedulerCurrentState; +} + +export interface SparkServicePlugin { + preparationStartedAt?: Date | null; + resourceAcquisitionStartedAt?: Date | null; + submissionStartedAt?: Date | null; + monitoringStartedAt?: Date | null; + cleanupStartedAt?: Date | null; + currentState?: PluginCurrentState; +} + +export interface SparkServiceError { + message?: string; + errorCode?: string; + source?: SparkErrorSource; +} + +/** + * A list of Notebook resources. + */ +export interface NotebookListResponse { + /** + * List of Notebooks. + */ + value: NotebookResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * Notebook resource type. + */ +export interface NotebookResource { + /** + * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; + /** + * The name of the resource + */ + name: string; + /** + * The type of the resource. 
Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * Resource Etag. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly etag?: string; + /** + * Properties of Notebook. + */ + properties: Notebook; +} + +/** + * Notebook. + */ +export interface Notebook { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The description of the notebook. + */ + description?: string; + /** + * Big data pool reference. + */ + bigDataPool?: BigDataPoolReference | null; + /** + * Session properties. + */ + sessionProperties?: NotebookSessionProperties | null; + /** + * Notebook root-level metadata. + */ + metadata: NotebookMetadata; + /** + * Notebook format (major number). Incremented between backwards incompatible changes to the notebook format. + */ + nbformat: number; + /** + * Notebook format (minor number). Incremented for backward compatible changes to the notebook format. + */ + nbformatMinor: number; + /** + * Array of cells of the current notebook. + */ + cells: NotebookCell[]; +} + +/** + * Session properties. + */ +export interface NotebookSessionProperties { + /** + * Amount of memory to use for the driver process. + */ + driverMemory: string; + /** + * Number of cores to use for the driver. + */ + driverCores: number; + /** + * Amount of memory to use per executor process. + */ + executorMemory: string; + /** + * Number of cores to use for each executor. + */ + executorCores: number; + /** + * Number of executors to launch for this session. + */ + numExecutors: number; +} + +/** + * Notebook root-level metadata. + */ +export interface NotebookMetadata { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. 
+ */ + [property: string]: any; + /** + * Kernel information. + */ + kernelspec?: NotebookKernelSpec; + /** + * Language info. + */ + languageInfo?: NotebookLanguageInfo; +} + +/** + * Kernel information. + */ +export interface NotebookKernelSpec { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Name of the kernel specification. + */ + name: string; + /** + * Name to display in UI. + */ + displayName: string; +} + +/** + * Language info. + */ +export interface NotebookLanguageInfo { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The programming language which this kernel runs. + */ + name: string; + /** + * The codemirror mode to use for code in this language. + */ + codemirrorMode?: string; +} + +/** + * Notebook cell. + */ +export interface NotebookCell { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * String identifying the type of cell. + */ + cellType: string; + /** + * Cell-level metadata. + */ + metadata: any; + /** + * Contents of the cell, represented as an array of lines. + */ + source: string[]; + /** + * Attachments associated with the cell. + */ + attachments?: any; + /** + * Cell-level output items. + */ + outputs?: NotebookCellOutputItem[]; +} + +/** + * An item of the notebook cell execution output. + */ +export interface NotebookCellOutputItem { + /** + * For output_type=stream, determines the name of stream (stdout / stderr). + */ + name?: string; + /** + * Execution sequence number. + */ + executionCount?: number; + /** + * Execution, display, or stream outputs. + */ + outputType: CellOutputType; + /** + * For output_type=stream, the stream's text output, represented as a string or an array of strings. + */ + text?: any; + /** + * Output data. 
Use MIME type as key, and content as value. + */ + data?: any; + /** + * Metadata for the output item. + */ + metadata?: any; +} + +/** + * Details of the data lake storage account associated with the workspace + */ +export interface DataLakeStorageAccountDetails { + /** + * Account URL + */ + accountUrl?: string; + /** + * Filesystem name + */ + filesystem?: string; +} + +/** + * Virtual Network Profile + */ +export interface VirtualNetworkProfile { + /** + * Subnet ID used for computes in workspace + */ + computeSubnetId?: string; +} + +/** + * Private endpoint details + */ +export interface PrivateEndpoint { + /** + * Resource id of the private endpoint. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; +} + +/** + * Connection state details of the private endpoint + */ +export interface PrivateLinkServiceConnectionState { + /** + * The private link service connection status. + */ + status?: string; + /** + * The private link service connection description. + */ + description?: string; + /** + * The actions required for private link service connection. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly actionsRequired?: string; +} + +/** + * Details of the encryption associated with the workspace + */ +export interface EncryptionDetails { + /** + * Double Encryption enabled + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly doubleEncryptionEnabled?: boolean; + /** + * Customer Managed Key Details + */ + cmk?: CustomerManagedKeyDetails; +} + +/** + * Details of the customer managed key associated with the workspace + */ +export interface CustomerManagedKeyDetails { + /** + * The customer managed key status on the workspace + * NOTE: This property will not be serialized. It can only be populated by the server. 
+ */ + readonly status?: string; + /** + * The key object of the workspace + */ + key?: WorkspaceKeyDetails; +} + +/** + * Details of the customer managed key associated with the workspace + */ +export interface WorkspaceKeyDetails { + /** + * Workspace Key sub-resource name + */ + name?: string; + /** + * Workspace Key sub-resource key vault url + */ + keyVaultUrl?: string; +} + +/** + * Managed Virtual Network Settings + */ +export interface ManagedVirtualNetworkSettings { + /** + * Prevent Data Exfiltration + */ + preventDataExfiltration?: boolean; + /** + * Linked Access Check On Target Resource + */ + linkedAccessCheckOnTargetResource?: boolean; + /** + * Allowed Aad Tenant Ids For Linking + */ + allowedAadTenantIdsForLinking?: string[]; +} + +/** + * Git integration settings + */ +export interface WorkspaceRepositoryConfiguration { + /** + * Type of workspace repositoryID configuration. Example WorkspaceVSTSConfiguration, WorkspaceGitHubConfiguration + */ + type?: string; + /** + * GitHub Enterprise host name. For example: https://github.mydomain.com + */ + hostName?: string; + /** + * Account name + */ + accountName?: string; + /** + * VSTS project name + */ + projectName?: string; + /** + * Repository name + */ + repositoryName?: string; + /** + * Collaboration branch + */ + collaborationBranch?: string; + /** + * Root folder to use in the repository + */ + rootFolder?: string; +} + +/** + * Purview Configuration + */ +export interface PurviewConfiguration { + /** + * Purview Resource ID + */ + purviewResourceId?: string; +} + +/** + * The workspace managed identity + */ +export interface ManagedIdentity { + /** + * The principal ID of the workspace managed identity + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly principalId?: string; + /** + * The tenant ID of the workspace managed identity + * NOTE: This property will not be serialized. It can only be populated by the server. 
+ */ + readonly tenantId?: string; + /** + * The type of managed identity for the workspace + */ + type?: ResourceIdentityType; +} + +/** + * Contains details when the response code indicates an error. + */ +export interface ErrorContract { + /** + * The error details. + */ + error?: ErrorResponse; +} + +/** + * Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.) + */ +export interface ErrorResponse { + /** + * The error code. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly code?: string; + /** + * The error message. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly message?: string; + /** + * The error target. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly target?: string; + /** + * The error details. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly details?: ErrorResponse[]; + /** + * The error additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly additionalInfo?: ErrorAdditionalInfo[]; +} + +/** + * The resource management error additional info. + */ +export interface ErrorAdditionalInfo { + /** + * The additional info type. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * The additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. 
+ */ + readonly info?: any; +} + +/** + * List of SQL pools + */ +export interface SqlPoolInfoListResult { + /** + * Link to the next page of results + */ + nextLink?: string; + /** + * List of SQL pools + */ + value?: SqlPool[]; +} + +/** + * SQL pool SKU + */ +export interface Sku { + /** + * The service tier + */ + tier?: string; + /** + * The SKU name + */ + name?: string; + /** + * If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted. + */ + capacity?: number; +} + +/** + * Collection of Big Data pool information + */ +export interface BigDataPoolResourceInfoListResult { + /** + * Link to the next page of results + */ + nextLink?: string; + /** + * List of Big Data pools + */ + value?: BigDataPoolResourceInfo[]; +} + +/** + * Auto-scaling properties of a Big Data pool powered by Apache Spark + */ +export interface AutoScaleProperties { + /** + * The minimum number of nodes the Big Data pool can support. + */ + minNodeCount?: number; + /** + * Whether automatic scaling is enabled for the Big Data pool. + */ + enabled?: boolean; + /** + * The maximum number of nodes the Big Data pool can support. + */ + maxNodeCount?: number; +} + +/** + * Auto-pausing properties of a Big Data pool powered by Apache Spark + */ +export interface AutoPauseProperties { + /** + * Number of minutes of idle time before the Big Data pool is automatically paused. + */ + delayInMinutes?: number; + /** + * Whether auto-pausing is enabled for the Big Data pool. + */ + enabled?: boolean; +} + +/** + * Library requirements for a Big Data pool powered by Apache Spark + */ +export interface LibraryRequirements { + /** + * The last update time of the library requirements file. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly time?: Date; + /** + * The library requirements. 
+ */ + content?: string; + /** + * The filename of the library requirements file. + */ + filename?: string; +} + +/** + * A list of integration runtime resources. + */ +export interface IntegrationRuntimeListResponse { + /** + * List of integration runtimes. + */ + value: IntegrationRuntimeResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * Azure Synapse nested object which serves as a compute resource for activities. + */ +export interface IntegrationRuntime { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Managed" | "SelfHosted"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Integration runtime description. + */ + description?: string; +} + +export interface GitHubAccessTokenRequest { + /** + * The GitHub Client Id. + */ + gitHubClientId: string; + /** + * The GitHub Access code. + */ + gitHubAccessCode: string; + /** + * The GitHub access token base URL. + */ + gitHubAccessTokenBaseUrl: string; +} + +export interface GitHubAccessTokenResponse { + gitHubAccessToken?: string; +} + +/** + * Azure Synapse expression definition. + */ +export interface Expression { + /** + * Expression type. + */ + type: ExpressionType; + /** + * Expression value. + */ + value: string; +} + +/** + * The base definition of a secret type. + */ +export interface SecretBase { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SecureString" | "AzureKeyVaultSecret"; +} + +/** + * Request body structure for starting data flow debug session. + */ +export interface StartDataFlowDebugSessionRequest { + /** + * The ID of data flow debug session. + */ + sessionId?: string; + /** + * Data flow instance. + */ + dataFlow?: DataFlowResource; + /** + * List of datasets. 
+ */ + datasets?: DatasetResource[]; + /** + * List of linked services. + */ + linkedServices?: LinkedServiceResource[]; + /** + * Staging info for debug session. + */ + staging?: any; + /** + * Data flow debug settings. + */ + debugSettings?: any; + /** + * The type of new Databricks cluster. + */ + incrementalDebug?: boolean; +} + +/** + * Response body structure for starting data flow debug session. + */ +export interface StartDataFlowDebugSessionResponse { + /** + * The ID of data flow debug job version. + */ + jobVersion?: string; +} + +/** + * Request body structure for data flow preview data. + */ +export interface DataFlowDebugPreviewDataRequest { + /** + * The ID of data flow debug session. + */ + sessionId?: string; + /** + * The data flow which contains the debug session. + */ + dataFlowName?: string; + /** + * The output stream name. + */ + streamName?: string; + /** + * The row limit for preview request. + */ + rowLimits?: number; +} + +/** + * Request body structure for data flow statistics. + */ +export interface DataFlowDebugStatisticsRequest { + /** + * The ID of data flow debug session. + */ + sessionId?: string; + /** + * The data flow which contains the debug session. + */ + dataFlowName?: string; + /** + * The output stream name. + */ + streamName?: string; + /** + * List of column names. + */ + columns?: string[]; +} + +/** + * Request body structure for data flow expression preview. + */ +export interface EvaluateDataFlowExpressionRequest { + /** + * The ID of data flow debug session. + */ + sessionId?: string; + /** + * The data flow which contains the debug session. + */ + dataFlowName?: string; + /** + * The output stream name. + */ + streamName?: string; + /** + * The row limit for preview request. + */ + rowLimits?: number; + /** + * The expression for preview. + */ + expression?: string; +} + +/** + * Response body structure of data flow query for data preview, statistics or expression preview. 
+ */ +export interface DataFlowDebugQueryResponse { + /** + * The run ID of data flow debug session. + */ + runId?: string; +} + +/** + * Response body structure of data flow result for data preview, statistics or expression preview. + */ +export interface DataFlowDebugResultResponse { + /** + * The run status of data preview, statistics or expression preview. + */ + status?: string; + /** + * The result data of data preview, statistics or expression preview. + */ + data?: string; +} + +/** + * Defines the response of a provision trigger dependency operation. + */ +export interface TriggerDependencyProvisioningStatus { + /** + * Trigger name. + */ + triggerName: string; + /** + * Provisioning status. + */ + provisioningStatus: string; +} + +/** + * Pipeline reference type. + */ +export interface PipelineReference { + /** + * Pipeline reference type. + */ + type: PipelineReferenceType; + /** + * Reference pipeline name. + */ + referenceName: string; + /** + * Reference name. + */ + name?: string; +} + +/** + * Pipeline that needs to be triggered with the given parameters. + */ +export interface TriggerPipelineReference { + /** + * Pipeline reference. + */ + pipelineReference?: PipelineReference; + /** + * Pipeline parameters. + */ + parameters?: { [propertyName: string]: any }; +} + +/** + * Parameters for updating a workspace resource. + */ +export interface WorkspaceUpdateParameters { + /** + * The resource tags. + */ + tags?: { [propertyName: string]: string }; + /** + * Managed service identity of the workspace. + */ + identity?: WorkspaceIdentity; +} + +/** + * Identity properties of the workspace resource. + */ +export interface WorkspaceIdentity { + /** + * The identity type. Currently the only supported type is 'SystemAssigned'. + */ + type: "SystemAssigned"; + /** + * The principal id of the identity. + * NOTE: This property will not be serialized. It can only be populated by the server. 
+ */ + readonly principalId?: string; + /** + * The client tenant id of the identity. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly tenantId?: string; +} + +/** + * Dataset reference type. + */ +export interface DatasetReference { + /** + * Dataset reference type. + */ + type: DatasetReferenceType; + /** + * Reference dataset name. + */ + referenceName: string; + /** + * Arguments for dataset. + */ + parameters?: { [propertyName: string]: any }; +} + +/** + * Data flow reference type. + */ +export interface DataFlowReference { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Data flow reference type. + */ + type: DataFlowReferenceType; + /** + * Reference data flow name. + */ + referenceName: string; + /** + * Reference data flow parameters from dataset. + */ + datasetParameters?: any; +} + +/** + * Rerun tumbling window trigger Parameters. + */ +export interface RerunTumblingWindowTriggerActionParameters { + /** + * The start time for the time period for which restatement is initiated. Only UTC time is currently supported. + */ + startTime: Date; + /** + * The end time for the time period for which restatement is initiated. Only UTC time is currently supported. + */ + endTime: Date; + /** + * The max number of parallel time windows (ready for execution) for which a rerun is triggered. + */ + maxConcurrency: number; +} + +/** + * A list of rerun triggers. + */ +export interface RerunTriggerListResponse { + /** + * List of rerun triggers. + */ + value: RerunTriggerResource[]; + /** + * The continuation token for getting the next page of results, if any remaining results exist, null otherwise. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly nextLink?: string; +} + +/** + * The request payload of get SSIS object metadata. 
+ */ +export interface GetSsisObjectMetadataRequest { + /** + * Metadata path. + */ + metadataPath?: string; +} + +/** + * The status of the operation. + */ +export interface SsisObjectMetadataStatusResponse { + /** + * The status of the operation. + */ + status?: string; + /** + * The operation name. + */ + name?: string; + /** + * The operation properties. + */ + properties?: string; + /** + * The operation error message. + */ + error?: string; +} + +/** + * The exposure control request. + */ +export interface ExposureControlRequest { + /** + * The feature name. + */ + featureName?: string; + /** + * The feature type. + */ + featureType?: string; +} + +/** + * The exposure control response. + */ +export interface ExposureControlResponse { + /** + * The feature name. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly featureName?: string; + /** + * The feature value. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly value?: string; +} + +/** + * Synapse notebook reference type. + */ +export interface SynapseNotebookReference { + /** + * Synapse notebook reference type. + */ + type: NotebookReferenceType; + /** + * Reference notebook name. + */ + referenceName: string; +} + +/** + * Synapse spark job reference type. + */ +export interface SynapseSparkJobReference { + /** + * Synapse spark job reference type. + */ + type: SparkJobReferenceType; + /** + * Reference spark job name. + */ + referenceName: string; +} + +/** + * SQL pool reference type. + */ +export interface SqlPoolReference { + /** + * SQL pool reference type. + */ + type: SqlPoolReferenceType; + /** + * Reference SQL pool name. + */ + referenceName: string; +} + +/** + * A data flow transformation. + */ +export interface Transformation { + /** + * Transformation name. + */ + name: string; + /** + * Transformation description. + */ + description?: string; +} + +/** + * Dataset location. 
+ */ +export interface DatasetLocation { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "AzureBlobStorageLocation" + | "AzureBlobFSLocation" + | "AzureDataLakeStoreLocation" + | "AmazonS3Location" + | "FileServerLocation" + | "AzureFileStorageLocation" + | "GoogleCloudStorageLocation" + | "FtpServerLocation" + | "SftpLocation" + | "HttpServerLocation" + | "HdfsLocation"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Specify the folder path of dataset. Type: string (or Expression with resultType string) + */ + folderPath?: any; + /** + * Specify the file name of dataset. Type: string (or Expression with resultType string). + */ + fileName?: any; +} + +/** + * Columns that define the structure of the dataset. + */ +export interface DatasetDataElement { + /** + * Name of the column. Type: string (or Expression with resultType string). + */ + name?: any; + /** + * Type of the column. Type: string (or Expression with resultType string). + */ + type?: any; +} + +/** + * Columns that define the physical type schema of the dataset. + */ +export interface DatasetSchemaDataElement { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Name of the schema column. Type: string (or Expression with resultType string). + */ + name?: any; + /** + * Type of the schema column. Type: string (or Expression with resultType string). + */ + type?: any; +} + +/** + * The format definition of a storage. + */ +export interface DatasetStorageFormat { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "TextFormat" + | "JsonFormat" + | "AvroFormat" + | "OrcFormat" + | "ParquetFormat"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. 
+ */ + [property: string]: any; + /** + * Serializer. Type: string (or Expression with resultType string). + */ + serializer?: any; + /** + * Deserializer. Type: string (or Expression with resultType string). + */ + deserializer?: any; +} + +/** + * The compression method used on a dataset. + */ +export interface DatasetCompression { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "BZip2" | "GZip" | "Deflate" | "ZipDeflate"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; +} + +/** + * Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, so not flattened in SDK models. + */ +export interface WebLinkedServiceTypeProperties { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + authenticationType: "Anonymous" | "Basic" | "ClientCertificate"; + /** + * The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string). + */ + url: any; +} + +/** + * Custom script action to run on HDI ondemand cluster once it's up. + */ +export interface ScriptAction { + /** + * The user provided name of the script action. + */ + name: string; + /** + * The URI for the script action. + */ + uri: string; + /** + * The node types on which the script action should be executed. + */ + roles: HdiNodeTypes; + /** + * The parameters for the script action. + */ + parameters?: string; +} + +/** + * Execution policy for an activity. + */ +export interface ActivityPolicy { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ */ + timeout?: any; + /** + * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. + */ + retry?: any; + /** + * Interval between each retry attempt (in seconds). The default is 30 sec. + */ + retryIntervalInSeconds?: number; + /** + * When set to true, Input from activity is considered as secure and will not be logged to monitoring. + */ + secureInput?: boolean; + /** + * When set to true, Output from activity is considered as secure and will not be logged to monitoring. + */ + secureOutput?: boolean; +} + +/** + * Connector read setting. + */ +export interface StoreReadSettings { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "AzureBlobStorageReadSettings" + | "AzureBlobFSReadSettings" + | "AzureDataLakeStoreReadSettings" + | "AmazonS3ReadSettings" + | "FileServerReadSettings" + | "AzureFileStorageReadSettings" + | "GoogleCloudStorageReadSettings" + | "FtpReadSettings" + | "SftpReadSettings" + | "HttpReadSettings" + | "HdfsReadSettings"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). + */ + maxConcurrentConnections?: any; +} + +/** + * Connector write settings. + */ +export interface StoreWriteSettings { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "SftpWriteSettings" + | "AzureBlobStorageWriteSettings" + | "AzureBlobFSWriteSettings" + | "AzureDataLakeStoreWriteSettings" + | "FileServerWriteSettings"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). 
+ */ + maxConcurrentConnections?: any; + /** + * The type of copy behavior for copy sink. + */ + copyBehavior?: any; +} + +/** + * Distcp settings. + */ +export interface DistcpSettings { + /** + * Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string). + */ + resourceManagerEndpoint: any; + /** + * Specifies an existing folder path which will be used to store temp Distcp command script. The script file is generated by ADF and will be removed after Copy job finished. Type: string (or Expression with resultType string). + */ + tempScriptPath: any; + /** + * Specifies the Distcp options. Type: string (or Expression with resultType string). + */ + distcpOptions?: any; +} + +/** + * Format read settings. + */ +export interface FormatReadSettings { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DelimitedTextReadSettings"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; +} + +/** + * Format write settings. + */ +export interface FormatWriteSettings { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "AvroWriteSettings" + | "DelimitedTextWriteSettings" + | "JsonWriteSettings"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; +} + +/** + * A copy activity source. 
+ */ +export interface CopySource { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "AvroSource" + | "ParquetSource" + | "DelimitedTextSource" + | "JsonSource" + | "OrcSource" + | "BinarySource" + | "TabularSource" + | "AzureTableSource" + | "BlobSource" + | "DocumentDbCollectionSource" + | "CosmosDbSqlApiSource" + | "DynamicsSource" + | "DynamicsCrmSource" + | "CommonDataServiceForAppsSource" + | "RelationalSource" + | "InformixSource" + | "MicrosoftAccessSource" + | "Db2Source" + | "OdbcSource" + | "MySqlSource" + | "PostgreSqlSource" + | "SybaseSource" + | "SapBwSource" + | "ODataSource" + | "SalesforceSource" + | "SalesforceServiceCloudSource" + | "SapCloudForCustomerSource" + | "SapEccSource" + | "SapHanaSource" + | "SapOpenHubSource" + | "SapTableSource" + | "RestSource" + | "SqlSource" + | "SqlServerSource" + | "AzureSqlSource" + | "SqlMISource" + | "SqlDWSource" + | "FileSystemSource" + | "HdfsSource" + | "AzureMySqlSource" + | "AzureDataExplorerSource" + | "OracleSource" + | "TeradataSource" + | "WebSource" + | "CassandraSource" + | "MongoDbSource" + | "MongoDbV2Source" + | "CosmosDbMongoDbApiSource" + | "Office365Source" + | "AzureDataLakeStoreSource" + | "AzureBlobFSSource" + | "HttpSource" + | "AmazonMWSSource" + | "AzurePostgreSqlSource" + | "ConcurSource" + | "CouchbaseSource" + | "DrillSource" + | "EloquaSource" + | "GoogleBigQuerySource" + | "GreenplumSource" + | "HBaseSource" + | "HiveSource" + | "HubspotSource" + | "ImpalaSource" + | "JiraSource" + | "MagentoSource" + | "MariaDBSource" + | "AzureMariaDBSource" + | "MarketoSource" + | "PaypalSource" + | "PhoenixSource" + | "PrestoSource" + | "QuickBooksSource" + | "ServiceNowSource" + | "ShopifySource" + | "SparkSource" + | "SquareSource" + | "XeroSource" + | "ZohoSource" + | "NetezzaSource" + | "VerticaSource" + | "SalesforceMarketingCloudSource" + | "ResponsysSource" + | "DynamicsAXSource" + | "OracleServiceCloudSource" + | 
"GoogleAdWordsSource" + | "AmazonRedshiftSource"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Source retry count. Type: integer (or Expression with resultType integer). + */ + sourceRetryCount?: any; + /** + * Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + sourceRetryWait?: any; + /** + * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). + */ + maxConcurrentConnections?: any; +} + +/** + * A copy activity sink. + */ +export interface CopySink { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "DelimitedTextSink" + | "JsonSink" + | "OrcSink" + | "AzurePostgreSqlSink" + | "AzureMySqlSink" + | "SapCloudForCustomerSink" + | "AzureQueueSink" + | "AzureTableSink" + | "AvroSink" + | "ParquetSink" + | "BinarySink" + | "BlobSink" + | "FileSystemSink" + | "DocumentDbCollectionSink" + | "CosmosDbSqlApiSink" + | "SqlSink" + | "SqlServerSink" + | "AzureSqlSink" + | "SqlMISink" + | "SqlDWSink" + | "OracleSink" + | "AzureDataLakeStoreSink" + | "AzureBlobFSSink" + | "AzureSearchIndexSink" + | "OdbcSink" + | "InformixSink" + | "MicrosoftAccessSink" + | "DynamicsSink" + | "DynamicsCrmSink" + | "CommonDataServiceForAppsSink" + | "AzureDataExplorerSink" + | "SalesforceSink" + | "SalesforceServiceCloudSink" + | "CosmosDbMongoDbApiSink"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. + */ + writeBatchSize?: any; + /** + * Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ */ + writeBatchTimeout?: any; + /** + * Sink retry count. Type: integer (or Expression with resultType integer). + */ + sinkRetryCount?: any; + /** + * Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + sinkRetryWait?: any; + /** + * The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). + */ + maxConcurrentConnections?: any; +} + +/** + * Staging settings. + */ +export interface StagingSettings { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Staging linked service reference. + */ + linkedServiceName: LinkedServiceReference; + /** + * The path to storage for storing the interim data. Type: string (or Expression with resultType string). + */ + path?: any; + /** + * Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableCompression?: any; +} + +/** + * Redirect incompatible row settings + */ +export interface RedirectIncompatibleRowSettings { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). + */ + linkedServiceName: any; + /** + * The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). + */ + path?: any; +} + +/** + * The settings that will be leveraged for SAP HANA source partitioning. + */ +export interface SapHanaPartitionSettings { + /** + * The name of the column that will be used for proceeding range partitioning. 
Type: string (or Expression with resultType string). + */ + partitionColumnName?: any; +} + +/** + * The settings that will be leveraged for SAP table source partitioning. + */ +export interface SapTablePartitionSettings { + /** + * The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionColumnName?: any; + /** + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionUpperBound?: any; + /** + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionLowerBound?: any; + /** + * The maximum value of partitions the table will be split into. Type: integer (or Expression with resultType string). + */ + maxPartitionsNumber?: any; +} + +/** + * SQL stored procedure parameter. + */ +export interface StoredProcedureParameter { + /** + * Stored procedure parameter value. Type: string (or Expression with resultType string). + */ + value?: any; + /** + * Stored procedure parameter type. + */ + type?: StoredProcedureParameterType; +} + +/** + * The settings that will be leveraged for Oracle source partitioning. + */ +export interface OraclePartitionSettings { + /** + * Names of the physical partitions of Oracle table. + */ + partitionNames?: any; + /** + * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionColumnName?: any; + /** + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). 
+   */
+  partitionUpperBound?: any;
+  /**
+   * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string).
+   */
+  partitionLowerBound?: any;
+}
+
+/**
+ * The settings that will be leveraged for teradata source partitioning.
+ */
+export interface TeradataPartitionSettings {
+  /**
+   * The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string).
+   */
+  partitionColumnName?: any;
+  /**
+   * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string).
+   */
+  partitionUpperBound?: any;
+  /**
+   * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string).
+   */
+  partitionLowerBound?: any;
+}
+
+/**
+ * Cursor methods for Mongodb query
+ */
+export interface MongoDbCursorMethodsProperties {
+  /**
+   * Describes unknown properties. The value of an unknown property can be of "any" type.
+   */
+  [property: string]: any;
+  /**
+   * Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string).
+   */
+  project?: any;
+  /**
+   * Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string).
+   */
+  sort?: any;
+  /**
+   * Specifies how many documents are skipped and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer).
+   */
+  skip?: any;
+  /**
+   * Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer).
+   */
+  limit?: any;
+}
+
+/**
+ * The settings that will be leveraged for Netezza source partitioning.
+ */
+export interface NetezzaPartitionSettings {
+  /**
+   * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string).
+   */
+  partitionColumnName?: any;
+  /**
+   * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string).
+   */
+  partitionUpperBound?: any;
+  /**
+   * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string).
+   */
+  partitionLowerBound?: any;
+}
+
+/**
+ * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3.
+ */
+export interface RedshiftUnloadSettings {
+  /**
+   * The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source.
+   */
+  s3LinkedServiceName: LinkedServiceReference;
+  /**
+   * The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string).
+   */
+  bucketName: any;
+}
+
+/**
+ * PolyBase settings.
+ */
+export interface PolybaseSettings {
+  /**
+   * Describes unknown properties. The value of an unknown property can be of "any" type.
+   */
+  [property: string]: any;
+  /**
+   * Reject type.
+ */ + rejectType?: PolybaseSettingsRejectType; + /** + * Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. + */ + rejectValue?: any; + /** + * Determines the number of rows to attempt to retrieve before the PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. + */ + rejectSampleValue?: any; + /** + * Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). + */ + useTypeDefault?: any; +} + +/** + * DW Copy Command settings. + */ +export interface DWCopyCommandSettings { + /** + * Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). + */ + defaultValues?: DWCopyCommandDefaultValue[]; + /** + * Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" } + */ + additionalOptions?: { [propertyName: string]: string }; +} + +/** + * Default value. + */ +export interface DWCopyCommandDefaultValue { + /** + * Column name. Type: object (or Expression with resultType string). + */ + columnName?: any; + /** + * The default value of the column. Type: object (or Expression with resultType string). + */ + defaultValue?: any; +} + +/** + * Log storage settings. + */ +export interface LogStorageSettings { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Log storage linked service reference. 
+   */
+  linkedServiceName: LinkedServiceReference;
+  /**
+   * The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string).
+   */
+  path?: any;
+}
+
+/**
+ * A copy activity translator.
+ */
+export interface CopyTranslator {
+  /**
+   * Polymorphic discriminator, which specifies the different types this object can be
+   */
+  type: "TabularTranslator";
+  /**
+   * Describes unknown properties. The value of an unknown property can be of "any" type.
+   */
+  [property: string]: any;
+}
+
+/**
+ * SSIS package location.
+ */
+export interface SsisPackageLocation {
+  /**
+   * The SSIS package path. Type: string (or Expression with resultType string).
+   */
+  packagePath?: any;
+  /**
+   * The type of SSIS package location.
+   */
+  type?: SsisPackageLocationType;
+  /**
+   * Password of the package.
+   */
+  packagePassword?: SecretBaseUnion;
+  /**
+   * The package access credential.
+   */
+  accessCredential?: SsisAccessCredential;
+  /**
+   * The configuration file of the package execution. Type: string (or Expression with resultType string).
+   */
+  configurationPath?: any;
+  /**
+   * The package name.
+   */
+  packageName?: string;
+  /**
+   * The embedded package content. Type: string (or Expression with resultType string).
+   */
+  packageContent?: any;
+  /**
+   * The embedded package last modified date.
+   */
+  packageLastModifiedDate?: string;
+  /**
+   * The embedded child package list.
+   */
+  childPackages?: SsisChildPackage[];
+}
+
+/**
+ * SSIS access credential.
+ */
+export interface SsisAccessCredential {
+  /**
+   * Domain for windows authentication.
+   */
+  domain: any;
+  /**
+   * UserName for windows authentication.
+   */
+  userName: any;
+  /**
+   * Password for windows authentication.
+   */
+  password: SecretBaseUnion;
+}
+
+/**
+ * SSIS embedded child package.
+ */
+export interface SsisChildPackage {
+  /**
+   * Path for embedded child package. Type: string (or Expression with resultType string).
+   */
+  packagePath: any;
+  /**
+   * Name for embedded child package.
+   */
+  packageName?: string;
+  /**
+   * Content for embedded child package. Type: string (or Expression with resultType string).
+   */
+  packageContent: any;
+  /**
+   * Last modified date for embedded child package.
+   */
+  packageLastModifiedDate?: string;
+}
+
+/**
+ * SSIS package execution credential.
+ */
+export interface SsisExecutionCredential {
+  /**
+   * Domain for windows authentication.
+   */
+  domain: any;
+  /**
+   * UserName for windows authentication.
+   */
+  userName: any;
+  /**
+   * Password for windows authentication.
+   */
+  password: SecureString;
+}
+
+/**
+ * SSIS execution parameter.
+ */
+export interface SsisExecutionParameter {
+  /**
+   * SSIS package execution parameter value. Type: string (or Expression with resultType string).
+   */
+  value: any;
+}
+
+/**
+ * SSIS property override.
+ */
+export interface SsisPropertyOverride {
+  /**
+   * SSIS package property override value. Type: string (or Expression with resultType string).
+   */
+  value: any;
+  /**
+   * Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true
+   */
+  isSensitive?: boolean;
+}
+
+/**
+ * SSIS package execution log location
+ */
+export interface SsisLogLocation {
+  /**
+   * The SSIS package execution log path. Type: string (or Expression with resultType string).
+   */
+  logPath: any;
+  /**
+   * The type of SSIS log location.
+   */
+  type: SsisLogLocationType;
+  /**
+   * The package execution log access credential.
+   */
+  accessCredential?: SsisAccessCredential;
+  /**
+   * Specifies the interval to refresh log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+   */
+  logRefreshInterval?: any;
+}
+
+/**
+ * Reference objects for custom activity
+ */
+export interface CustomActivityReferenceObject {
+  /**
+   * Linked service references.
+   */
+  linkedServices?: LinkedServiceReference[];
+  /**
+   * Dataset references.
+   */
+  datasets?: DatasetReference[];
+}
+
+/**
+ * Web activity authentication properties.
+ */
+export interface WebActivityAuthentication {
+  /**
+   * Web activity authentication (Basic/ClientCertificate/MSI)
+   */
+  type: string;
+  /**
+   * Base64-encoded contents of a PFX file.
+   */
+  pfx?: SecretBaseUnion;
+  /**
+   * Web activity authentication user name for basic authentication.
+   */
+  username?: string;
+  /**
+   * Password for the PFX file or basic authentication.
+   */
+  password?: SecretBaseUnion;
+  /**
+   * Resource for which Azure Auth token will be requested when using MSI Authentication.
+   */
+  resource?: string;
+}
+
+/**
+ * Switch cases which have a value and corresponding activities.
+ */
+export interface SwitchCase {
+  /**
+   * Expected value that satisfies the expression result of the 'on' property.
+   */
+  value?: string;
+  /**
+   * List of activities to execute for satisfied case condition.
+   */
+  activities?: ActivityUnion[];
+}
+
+/**
+ * Azure ML WebService Input/Output file
+ */
+export interface AzureMLWebServiceFile {
+  /**
+   * The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: string (or Expression with resultType string).
+   */
+  filePath: any;
+  /**
+   * Reference to an Azure Storage LinkedService, where Azure ML WebService Input/Output file located.
+   */
+  linkedServiceName: LinkedServiceReference;
+}
+
+/**
+ * Compute properties for data flow activity.
+ */
+export interface ExecuteDataFlowActivityTypePropertiesCompute {
+  /**
+   * Compute type of the cluster which will execute data flow job.
+   */
+  computeType?: DataFlowComputeType;
+  /**
+   * Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
+   */
+  coreCount?: number;
+}
+
+/**
+ * The workflow trigger recurrence.
+ */ +export interface ScheduleTriggerRecurrence { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The frequency. + */ + frequency?: RecurrenceFrequency; + /** + * The interval. + */ + interval?: number; + /** + * The start time. + */ + startTime?: Date; + /** + * The end time. + */ + endTime?: Date; + /** + * The time zone. + */ + timeZone?: string; + /** + * The recurrence schedule. + */ + schedule?: RecurrenceSchedule; +} + +/** + * The recurrence schedule. + */ +export interface RecurrenceSchedule { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The minutes. + */ + minutes?: number[]; + /** + * The hours. + */ + hours?: number[]; + /** + * The days of the week. + */ + weekDays?: DayOfWeek[]; + /** + * The month days. + */ + monthDays?: number[]; + /** + * The monthly occurrences. + */ + monthlyOccurrences?: RecurrenceScheduleOccurrence[]; +} + +/** + * The recurrence schedule occurrence. + */ +export interface RecurrenceScheduleOccurrence { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The day of the week. + */ + day?: DayOfWeek; + /** + * The occurrence. + */ + occurrence?: number; +} + +/** + * Execution policy for an activity. + */ +export interface RetryPolicy { + /** + * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. + */ + count?: any; + /** + * Interval between retries in seconds. Default is 30. + */ + intervalInSeconds?: number; +} + +/** + * Referenced dependency. 
+ */ +export interface DependencyReference { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "TriggerDependencyReference" + | "TumblingWindowTriggerDependencyReference" + | "SelfDependencyTumblingWindowTriggerReference"; +} + +/** + * Trigger reference type. + */ +export interface TriggerReference { + /** + * Trigger reference type. + */ + type: TriggerReferenceType; + /** + * Reference trigger name. + */ + referenceName: string; +} + +/** + * The compute resource properties for managed integration runtime. + */ +export interface IntegrationRuntimeComputeProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities + */ + location?: string; + /** + * The node size requirement to managed integration runtime. + */ + nodeSize?: string; + /** + * The required number of nodes for managed integration runtime. + */ + numberOfNodes?: number; + /** + * Maximum parallel executions count per node for managed integration runtime. + */ + maxParallelExecutionsPerNode?: number; + /** + * Data flow properties for managed integration runtime. + */ + dataFlowProperties?: IntegrationRuntimeDataFlowProperties; + /** + * VNet properties for managed integration runtime. + */ + vNetProperties?: IntegrationRuntimeVNetProperties; +} + +/** + * Data flow properties for managed integration runtime. + */ +export interface IntegrationRuntimeDataFlowProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Compute type of the cluster which will execute data flow job. + */ + computeType?: DataFlowComputeType; + /** + * Core count of the cluster which will execute data flow job. 
Supported values are: 8, 16, 32, 48, 80, 144 and 272. + */ + coreCount?: number; + /** + * Time to live (in minutes) setting of the cluster which will execute data flow job. + */ + timeToLive?: number; +} + +/** + * VNet properties for managed integration runtime. + */ +export interface IntegrationRuntimeVNetProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The ID of the VNet that this integration runtime will join. + */ + vNetId?: string; + /** + * The name of the subnet this integration runtime will join. + */ + subnet?: string; + /** + * Resource IDs of the public IP addresses that this integration runtime will use. + */ + publicIPs?: string[]; +} + +/** + * SSIS properties for managed integration runtime. + */ +export interface IntegrationRuntimeSsisProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Catalog information for managed dedicated integration runtime. + */ + catalogInfo?: IntegrationRuntimeSsisCatalogInfo; + /** + * License type for bringing your own license scenario. + */ + licenseType?: IntegrationRuntimeLicenseType; + /** + * Custom setup script properties for a managed dedicated integration runtime. + */ + customSetupScriptProperties?: IntegrationRuntimeCustomSetupScriptProperties; + /** + * Data proxy properties for a managed dedicated integration runtime. + */ + dataProxyProperties?: IntegrationRuntimeDataProxyProperties; + /** + * The edition for the SSIS Integration Runtime + */ + edition?: IntegrationRuntimeEdition; + /** + * Custom setup without script properties for a SSIS integration runtime. + */ + expressCustomSetupProperties?: CustomSetupBase[]; +} + +/** + * Catalog information for managed dedicated integration runtime. + */ +export interface IntegrationRuntimeSsisCatalogInfo { + /** + * Describes unknown properties. 
The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The catalog database server URL. + */ + catalogServerEndpoint?: string; + /** + * The administrator user name of catalog database. + */ + catalogAdminUserName?: string; + /** + * The password of the administrator user account of the catalog database. + */ + catalogAdminPassword?: SecureString; + /** + * The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/ + */ + catalogPricingTier?: IntegrationRuntimeSsisCatalogPricingTier; +} + +/** + * Custom setup script properties for a managed dedicated integration runtime. + */ +export interface IntegrationRuntimeCustomSetupScriptProperties { + /** + * The URI of the Azure blob container that contains the custom setup script. + */ + blobContainerUri?: string; + /** + * The SAS token of the Azure blob container. + */ + sasToken?: SecureString; +} + +/** + * Data proxy properties for a managed dedicated integration runtime. + */ +export interface IntegrationRuntimeDataProxyProperties { + /** + * The self-hosted integration runtime reference. + */ + connectVia?: EntityReference; + /** + * The staging linked service reference. + */ + stagingLinkedService?: EntityReference; + /** + * The path to contain the staged data in the Blob storage. + */ + path?: string; +} + +/** + * The entity reference. + */ +export interface EntityReference { + /** + * The type of this referenced entity. + */ + type?: IntegrationRuntimeEntityReferenceType; + /** + * The name of this referenced entity. + */ + referenceName?: string; +} + +/** + * The base definition of the custom setup. + */ +export interface CustomSetupBase { + /** + * The type of custom setup. + */ + type: string; +} + +/** + * The base definition of a linked integration runtime. 
+ */ +export interface LinkedIntegrationRuntimeType { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + authorizationType: "Key" | "RBAC"; +} + +/** + * The storage account linked service. + */ +export type AzureStorageLinkedService = LinkedService & { + /** + * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of accountKey in connection string. + */ + accountKey?: AzureKeyVaultSecretReference; + /** + * SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + sasUri?: any; + /** + * The Azure key vault secret reference of sasToken in sas uri. + */ + sasToken?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: string; +}; + +/** + * The azure blob storage linked service. + */ +export type AzureBlobStorageLinkedService = LinkedService & { + /** + * The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of accountKey in connection string. + */ + accountKey?: AzureKeyVaultSecretReference; + /** + * SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + sasUri?: any; + /** + * The Azure key vault secret reference of sasToken in sas uri. + */ + sasToken?: AzureKeyVaultSecretReference; + /** + * Blob service endpoint of the Azure Blob Storage resource. 
It is mutually exclusive with connectionString, sasUri property. + */ + serviceEndpoint?: string; + /** + * The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against Azure SQL Data Warehouse. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: string; +}; + +/** + * The azure table storage linked service. + */ +export type AzureTableStorageLinkedService = LinkedService & { + /** + * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of accountKey in connection string. + */ + accountKey?: AzureKeyVaultSecretReference; + /** + * SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + sasUri?: any; + /** + * The Azure key vault secret reference of sasToken in sas uri. + */ + sasToken?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: string; +}; + +/** + * Azure SQL Data Warehouse linked service. + */ +export type AzureSqlDWLinkedService = LinkedService & { + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. 
Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against Azure SQL Data Warehouse. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * SQL Server linked service. + */ +export type SqlServerLinkedService = LinkedService & { + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The on-premises Windows authentication user name. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * The on-premises Windows authentication password. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Microsoft Azure SQL Database linked service. + */ +export type AzureSqlDatabaseLinkedService = LinkedService & { + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. 
+ */ + password?: AzureKeyVaultSecretReference; + /** + * The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against Azure SQL Database. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure SQL Managed Instance linked service. + */ +export type AzureSqlMILinkedService = LinkedService & { + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against Azure SQL Managed Instance. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Batch linked service. + */ +export type AzureBatchLinkedService = LinkedService & { + /** + * The Azure Batch account name. 
Type: string (or Expression with resultType string). + */ + accountName: any; + /** + * The Azure Batch account access key. + */ + accessKey?: SecretBaseUnion; + /** + * The Azure Batch URI. Type: string (or Expression with resultType string). + */ + batchUri: any; + /** + * The Azure Batch pool name. Type: string (or Expression with resultType string). + */ + poolName: any; + /** + * The Azure Storage linked service reference. + */ + linkedServiceName: LinkedServiceReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Key Vault linked service. + */ +export type AzureKeyVaultLinkedService = LinkedService & { + /** + * The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). + */ + baseUrl: any; +}; + +/** + * Microsoft Azure Cosmos Database (CosmosDB) linked service. + */ +export type CosmosDbLinkedService = LinkedService & { + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string) + */ + accountEndpoint?: any; + /** + * The name of the database. Type: string (or Expression with resultType string) + */ + database?: any; + /** + * The account key of the Azure CosmosDB account. Type: SecureString or AzureKeyVaultSecretReference. + */ + accountKey?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Dynamics linked service. 
+ */ +export type DynamicsLinkedService = LinkedService & { + /** + * The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). + */ + deploymentType: DynamicsDeploymentType; + /** + * The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). + */ + hostName?: string; + /** + * The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: string; + /** + * The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + */ + serviceUri?: string; + /** + * The organization name of the Dynamics instance. The property is required for on-prem and required for online when there are more than one Dynamics instances associated with the user. Type: string (or Expression with resultType string). + */ + organizationName?: string; + /** + * The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + */ + authenticationType: DynamicsAuthenticationType; + /** + * User name to access the Dynamics instance. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password to access the Dynamics instance. + */ + password?: SecretBaseUnion; + /** + * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). 
+ */ + servicePrincipalId?: any; + /** + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + */ + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + /** + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. + */ + servicePrincipalCredential?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Dynamics CRM linked service. + */ +export type DynamicsCrmLinkedService = LinkedService & { + /** + * The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with resultType string). + */ + deploymentType: DynamicsDeploymentType; + /** + * The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). + */ + hostName?: any; + /** + * The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). 
+ */ + serviceUri?: any; + /** + * The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there are more than one Dynamics CRM instances associated with the user. Type: string (or Expression with resultType string). + */ + organizationName?: any; + /** + * The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + */ + authenticationType: DynamicsAuthenticationType; + /** + * User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password to access the Dynamics CRM instance. + */ + password?: SecretBaseUnion; + /** + * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + */ + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + /** + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. + */ + servicePrincipalCredential?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
+ */ + encryptedCredential?: any; +}; + +/** + * Common Data Service for Apps linked service. + */ +export type CommonDataServiceForAppsLinkedService = LinkedService & { + /** + * The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). + */ + deploymentType: DynamicsDeploymentType; + /** + * The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). + */ + hostName?: any; + /** + * The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + */ + serviceUri?: any; + /** + * The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there are more than one Common Data Service for Apps instances associated with the user. Type: string (or Expression with resultType string). + */ + organizationName?: any; + /** + * The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario. 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + */ + authenticationType: DynamicsAuthenticationType; + /** + * User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). 
+ */ + username?: any; + /** + * Password to access the Common Data Service for Apps instance. + */ + password?: SecretBaseUnion; + /** + * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + */ + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + /** + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. + */ + servicePrincipalCredential?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * HDInsight linked service. + */ +export type HDInsightLinkedService = LinkedService & { + /** + * HDInsight cluster URI. Type: string (or Expression with resultType string). + */ + clusterUri: any; + /** + * HDInsight cluster user name. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * HDInsight cluster password. + */ + password?: SecretBaseUnion; + /** + * The Azure Storage linked service reference. + */ + linkedServiceName?: LinkedServiceReference; + /** + * A reference to the Azure SQL linked service that points to the HCatalog database. + */ + hcatalogLinkedServiceName?: LinkedServiceReference; + /** + * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. + */ + isEspEnabled?: any; + /** + * Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). + */ + fileSystem?: any; +}; + +/** + * File system linked service. + */ +export type FileServerLinkedService = LinkedService & { + /** + * Host name of the server. Type: string (or Expression with resultType string). + */ + host: any; + /** + * User ID to logon the server. Type: string (or Expression with resultType string). + */ + userId?: any; + /** + * Password to logon the server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure File Storage linked service. + */ +export type AzureFileStorageLinkedService = LinkedService & { + /** + * Host name of the server. Type: string (or Expression with resultType string). + */ + host: any; + /** + * User ID to logon the server. Type: string (or Expression with resultType string). + */ + userId?: any; + /** + * Password to logon the server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Google Cloud Storage. + */ +export type GoogleCloudStorageLinkedService = LinkedService & { + /** + * The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). 
+ */ + accessKeyId?: any; + /** + * The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. + */ + secretAccessKey?: SecretBaseUnion; + /** + * This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). + */ + serviceUrl?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Oracle database. + */ +export type OracleLinkedService = LinkedService & { + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure MySQL database linked service. + */ +export type AzureMySqlLinkedService = LinkedService & { + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for MySQL data source. + */ +export type MySqlLinkedService = LinkedService & { + /** + * The connection string. 
+ */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for PostgreSQL data source. + */ +export type PostgreSqlLinkedService = LinkedService & { + /** + * The connection string. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Sybase data source. + */ +export type SybaseLinkedService = LinkedService & { + /** + * Server name for connection. Type: string (or Expression with resultType string). + */ + server: any; + /** + * Database name for connection. Type: string (or Expression with resultType string). + */ + database: any; + /** + * Schema name for connection. Type: string (or Expression with resultType string). + */ + schema?: any; + /** + * AuthenticationType to be used for connection. + */ + authenticationType?: SybaseAuthenticationType; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for DB2 data source. 
+ */ +export type Db2LinkedService = LinkedService & { + /** + * Server name for connection. Type: string (or Expression with resultType string). + */ + server: any; + /** + * Database name for connection. Type: string (or Expression with resultType string). + */ + database: any; + /** + * AuthenticationType to be used for connection. + */ + authenticationType?: Db2AuthenticationType; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication. + */ + password?: SecretBaseUnion; + /** + * Under where packages are created when querying database. Type: string (or Expression with resultType string). + */ + packageCollection?: any; + /** + * Certificate Common Name when TLS is enabled. Type: string (or Expression with resultType string). + */ + certificateCommonName?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Teradata data source. + */ +export type TeradataLinkedService = LinkedService & { + /** + * Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * Server name for connection. Type: string (or Expression with resultType string). + */ + server?: any; + /** + * AuthenticationType to be used for connection. + */ + authenticationType?: TeradataAuthenticationType; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
+ */ + encryptedCredential?: any; +}; + +/** + * Azure ML Studio Web Service linked service. + */ +export type AzureMLLinkedService = LinkedService & { + /** + * The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). + */ + mlEndpoint: any; + /** + * The API key for accessing the Azure ML model endpoint. + */ + apiKey: SecretBaseUnion; + /** + * The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). + */ + updateResourceEndpoint?: any; + /** + * The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure ML Service linked service. + */ +export type AzureMLServiceLinkedService = LinkedService & { + /** + * Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). + */ + subscriptionId: any; + /** + * Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). + */ + resourceGroupName: any; + /** + * Azure ML Service workspace name. Type: string (or Expression with resultType string). 
+ */ + mlWorkspaceName: any; + /** + * The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Open Database Connectivity (ODBC) linked service. + */ +export type OdbcLinkedService = LinkedService & { + /** + * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The access credential portion of the connection string specified in driver-specific property-value format. + */ + credential?: SecretBaseUnion; + /** + * User name for Basic authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Informix linked service. 
+ */ +export type InformixLinkedService = LinkedService & { + /** + * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The access credential portion of the connection string specified in driver-specific property-value format. + */ + credential?: SecretBaseUnion; + /** + * User name for Basic authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Microsoft Access linked service. + */ +export type MicrosoftAccessLinkedService = LinkedService & { + /** + * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The access credential portion of the connection string specified in driver-specific property-value format. + */ + credential?: SecretBaseUnion; + /** + * User name for Basic authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Basic authentication. 
+ */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Hadoop Distributed File System (HDFS) linked service. + */ +export type HdfsLinkedService = LinkedService & { + /** + * The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). + */ + url: any; + /** + * Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * User name for Windows authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Windows authentication. + */ + password?: SecretBaseUnion; +}; + +/** + * Open Data Protocol (OData) linked service. + */ +export type ODataLinkedService = LinkedService & { + /** + * The URL of the OData service endpoint. Type: string (or Expression with resultType string). + */ + url: any; + /** + * Type of authentication used to connect to the OData service. + */ + authenticationType?: ODataAuthenticationType; + /** + * User name of the OData service. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password of the OData service. + */ + password?: SecretBaseUnion; + /** + * Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * Specify the application id of your application registered in Azure Active Directory. 
Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * Specify the resource you are requesting authorization to use Directory. Type: string (or Expression with resultType string). + */ + aadResourceId?: any; + /** + * Specify the credential type (key or cert) is used for service principal. + */ + aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType; + /** + * Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). + */ + servicePrincipalEmbeddedCert?: SecretBaseUnion; + /** + * Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). + */ + servicePrincipalEmbeddedCertPassword?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Web linked service. + */ +export type WebLinkedService = LinkedService & { + /** + * Web linked service properties. + */ + typeProperties: WebLinkedServiceTypePropertiesUnion; +}; + +/** + * Linked service for Cassandra data source. + */ +export type CassandraLinkedService = LinkedService & { + /** + * Host name for connection. Type: string (or Expression with resultType string). + */ + host: any; + /** + * AuthenticationType to be used for connection. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The port for the connection. Type: integer (or Expression with resultType integer). 
+ */ + port?: any; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for MongoDb data source. + */ +export type MongoDbLinkedService = LinkedService & { + /** + * The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). + */ + server: any; + /** + * The authentication type to be used to connect to the MongoDB database. + */ + authenticationType?: MongoDbAuthenticationType; + /** + * The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). + */ + databaseName: any; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication. + */ + password?: SecretBaseUnion; + /** + * Database to verify the username and password. Type: string (or Expression with resultType string). + */ + authSource?: any; + /** + * The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableSsl?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for MongoDB data source. + */ +export type MongoDbV2LinkedService = LinkedService & { + /** + * The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). + */ + database: any; +}; + +/** + * Linked service for CosmosDB (MongoDB API) data source. + */ +export type CosmosDbMongoDbApiLinkedService = LinkedService & { + /** + * The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). + */ + database: any; +}; + +/** + * Azure Data Lake Store linked service. + */ +export type AzureDataLakeStoreLinkedService = LinkedService & { + /** + * Data Lake Store service URI. Type: string (or Expression with resultType string). + */ + dataLakeStoreUri: any; + /** + * The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The Key of the application used to authenticate against the Azure Data Lake Store account. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * Data Lake Store account name. Type: string (or Expression with resultType string). 
+ */ + accountName?: any; + /** + * Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). + */ + subscriptionId?: any; + /** + * Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). + */ + resourceGroupName?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Data Lake Storage Gen2 linked service. + */ +export type AzureBlobFSLinkedService = LinkedService & { + /** + * Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). + */ + url: any; + /** + * Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). + */ + accountKey?: any; + /** + * The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Office365 linked service. + */ +export type Office365LinkedService = LinkedService & { + /** + * Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). 
+ */ + office365TenantId: any; + /** + * Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). + */ + servicePrincipalTenantId: any; + /** + * Specify the application's client ID. Type: string (or Expression with resultType string). + */ + servicePrincipalId: any; + /** + * Specify the application's key. + */ + servicePrincipalKey: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Salesforce. + */ +export type SalesforceLinkedService = LinkedService & { + /** + * The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + */ + environmentUrl?: any; + /** + * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * The password for Basic authentication of the Salesforce instance. + */ + password?: SecretBaseUnion; + /** + * The security token is required to remotely access Salesforce instance. + */ + securityToken?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Salesforce Service Cloud. + */ +export type SalesforceServiceCloudLinkedService = LinkedService & { + /** + * The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. 
To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + */ + environmentUrl?: any; + /** + * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * The password for Basic authentication of the Salesforce instance. + */ + password?: SecretBaseUnion; + /** + * The security token is required to remotely access Salesforce instance. + */ + securityToken?: SecretBaseUnion; + /** + * Extended properties appended to the connection string. Type: string (or Expression with resultType string). + */ + extendedProperties?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for SAP Cloud for Customer. + */ +export type SapCloudForCustomerLinkedService = LinkedService & { + /** + * The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). + */ + url: any; + /** + * The username for Basic authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * The password for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for SAP ERP Central Component(SAP ECC). + */ +export type SapEccLinkedService = LinkedService & { + /** + * The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. 
Type: string (or Expression with resultType string). + */ + url: string; + /** + * The username for Basic authentication. Type: string (or Expression with resultType string). + */ + username?: string; + /** + * The password for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). + */ + encryptedCredential?: string; +}; + +/** + * SAP Business Warehouse Open Hub Destination Linked Service. + */ +export type SapOpenHubLinkedService = LinkedService & { + /** + * Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). + */ + server: any; + /** + * System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). + */ + systemNumber: any; + /** + * Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). + */ + clientId: any; + /** + * Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). + */ + language?: any; + /** + * Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to access the SAP BW server where the open hub destination is located. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
+ */ + encryptedCredential?: any; +}; + +/** + * Rest Service linked service. + */ +export type RestServiceLinkedService = LinkedService & { + /** + * The base URL of the REST service. + */ + url: any; + /** + * Whether to validate server side SSL certificate when connecting to the endpoint.The default value is true. Type: boolean (or Expression with resultType boolean). + */ + enableServerCertificateValidation?: any; + /** + * Type of authentication used to connect to the REST service. + */ + authenticationType: RestServiceAuthenticationType; + /** + * The user name used in Basic authentication type. + */ + userName?: any; + /** + * The password used in Basic authentication type. + */ + password?: SecretBaseUnion; + /** + * The application's client ID used in AadServicePrincipal authentication type. + */ + servicePrincipalId?: any; + /** + * The application's key used in AadServicePrincipal authentication type. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. + */ + tenant?: any; + /** + * The resource you are requesting authorization to use. + */ + aadResourceId?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Amazon S3. + */ +export type AmazonS3LinkedService = LinkedService & { + /** + * The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). + */ + accessKeyId?: any; + /** + * The secret access key of the Amazon S3 Identity and Access Management (IAM) user. + */ + secretAccessKey?: SecretBaseUnion; + /** + * This value specifies the endpoint to access with the S3 Connector. 
This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). + */ + serviceUrl?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Amazon Redshift. + */ +export type AmazonRedshiftLinkedService = LinkedService & { + /** + * The name of the Amazon Redshift server. Type: string (or Expression with resultType string). + */ + server: any; + /** + * The username of the Amazon Redshift source. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * The password of the Amazon Redshift source. + */ + password?: SecretBaseUnion; + /** + * The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). + */ + database: any; + /** + * The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). + */ + port?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Custom linked service. + */ +export type CustomDataSourceLinkedService = LinkedService & { + /** + * Custom linked service properties. + */ + typeProperties: any; +}; + +/** + * Linked service for Windows Azure Search Service. + */ +export type AzureSearchLinkedService = LinkedService & { + /** + * URL for Azure Search service. Type: string (or Expression with resultType string). 
+ */ + url: any; + /** + * Admin Key for Azure Search service + */ + key?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for an HTTP source. + */ +export type HttpLinkedService = LinkedService & { + /** + * The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). + */ + url: any; + /** + * The authentication type to be used to connect to the HTTP server. + */ + authenticationType?: HttpAuthenticationType; + /** + * User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. + */ + password?: SecretBaseUnion; + /** + * Base64 encoded certificate data for ClientCertificate authentication. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). + */ + embeddedCertData?: any; + /** + * Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). + */ + certThumbprint?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). 
+ */ + enableServerCertificateValidation?: any; +}; + +/** + * A FTP server Linked Service. + */ +export type FtpServerLinkedService = LinkedService & { + /** + * Host name of the FTP server. Type: string (or Expression with resultType string). + */ + host: any; + /** + * The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * The authentication type to be used to connect to the FTP server. + */ + authenticationType?: FtpAuthenticationType; + /** + * Username to logon the FTP server. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to logon the FTP server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). + */ + enableSsl?: any; + /** + * If true, validate the FTP server SSL certificate when connect over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). + */ + enableServerCertificateValidation?: any; +}; + +/** + * A linked service for an SSH File Transfer Protocol (SFTP) server. + */ +export type SftpServerLinkedService = LinkedService & { + /** + * The SFTP server host name. Type: string (or Expression with resultType string). + */ + host: any; + /** + * The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * The authentication type to be used to connect to the FTP server. 
+ */ + authenticationType?: SftpAuthenticationType; + /** + * The username used to log on to the SFTP server. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to logon the SFTP server for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). + */ + privateKeyPath?: any; + /** + * Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. + */ + privateKeyContent?: SecretBaseUnion; + /** + * The password to decrypt the SSH private key if the SSH private key is encrypted. + */ + passPhrase?: SecretBaseUnion; + /** + * If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). + */ + skipHostKeyValidation?: any; + /** + * The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). + */ + hostKeyFingerprint?: any; +}; + +/** + * SAP Business Warehouse Linked Service. + */ +export type SapBWLinkedService = LinkedService & { + /** + * Host name of the SAP BW instance. Type: string (or Expression with resultType string). + */ + server: any; + /** + * System number of the BW system. (Usually a two-digit decimal number represented as a string.) 
Type: string (or Expression with resultType string). + */ + systemNumber: any; + /** + * Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). + */ + clientId: any; + /** + * Username to access the SAP BW server. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to access the SAP BW server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * SAP HANA Linked Service. + */ +export type SapHanaLinkedService = LinkedService & { + /** + * SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * Host name of the SAP HANA server. Type: string (or Expression with resultType string). + */ + server: any; + /** + * The authentication type to be used to connect to the SAP HANA server. + */ + authenticationType?: SapHanaAuthenticationType; + /** + * Username to access the SAP HANA server. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to access the SAP HANA server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Amazon Marketplace Web Service linked service. + */ +export type AmazonMWSLinkedService = LinkedService & { + /** + * The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com) + */ + endpoint: any; + /** + * The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. 
A2EUQ1WTGCTBG2) + */ + marketplaceID: any; + /** + * The Amazon seller ID. + */ + sellerID: any; + /** + * The Amazon MWS authentication token. + */ + mwsAuthToken?: SecretBaseUnion; + /** + * The access key id used to access data. + */ + accessKeyId: any; + /** + * The secret key used to access data. + */ + secretKey?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure PostgreSQL linked service. + */ +export type AzurePostgreSqlLinkedService = LinkedService & { + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Concur Service linked service. + */ +export type ConcurLinkedService = LinkedService & { + /** + * Application client_id supplied by Concur App Management. + */ + clientId: any; + /** + * The user name that you use to access Concur Service. 
+ */ + username: any; + /** + * The password corresponding to the user name that you provided in the username field. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Couchbase server linked service. + */ +export type CouchbaseLinkedService = LinkedService & { + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of credString in connection string. + */ + credString?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Drill server linked service. + */ +export type DrillLinkedService = LinkedService & { + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Eloqua server linked service. + */ +export type EloquaLinkedService = LinkedService & { + /** + * The endpoint of the Eloqua server. (i.e. eloqua.example.com) + */ + endpoint: any; + /** + * The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice) + */ + username: any; + /** + * The password corresponding to the user name. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Google BigQuery service linked service. + */ +export type GoogleBigQueryLinkedService = LinkedService & { + /** + * The default BigQuery project to query against. + */ + project: any; + /** + * A comma-separated list of public BigQuery projects to access. + */ + additionalProjects?: any; + /** + * Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. + */ + requestGoogleDriveScope?: any; + /** + * The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. 
+ */ + authenticationType: GoogleBigQueryAuthenticationType; + /** + * The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. + */ + refreshToken?: SecretBaseUnion; + /** + * The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). + */ + clientId?: any; + /** + * The client secret of the google application used to acquire the refresh token. + */ + clientSecret?: SecretBaseUnion; + /** + * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. + */ + email?: any; + /** + * The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. + */ + keyFilePath?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Greenplum Database linked service. + */ +export type GreenplumLinkedService = LinkedService & { + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. 
Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * HBase server linked service. + */ +export type HBaseLinkedService = LinkedService & { + /** + * The IP address or host name of the HBase server. (i.e. 192.168.222.160) + */ + host: any; + /** + * The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. + */ + port?: any; + /** + * The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version) + */ + httpPath?: any; + /** + * The authentication mechanism to use to connect to the HBase server. + */ + authenticationType: HBaseAuthenticationType; + /** + * The user name used to connect to the HBase instance. + */ + username?: any; + /** + * The password corresponding to the user name. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Hive Server linked service. 
+ */ +export type HiveLinkedService = LinkedService & { + /** + * IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enable). + */ + host: any; + /** + * The TCP port that the Hive server uses to listen for client connections. + */ + port?: any; + /** + * The type of Hive server. + */ + serverType?: HiveServerType; + /** + * The transport protocol to use in the Thrift layer. + */ + thriftTransportProtocol?: HiveThriftTransportProtocol; + /** + * The authentication method used to access the Hive server. + */ + authenticationType: HiveAuthenticationType; + /** + * true to indicate using the ZooKeeper service, false not. + */ + serviceDiscoveryMode?: any; + /** + * The namespace on ZooKeeper under which Hive Server 2 nodes are added. + */ + zooKeeperNameSpace?: any; + /** + * Specifies whether the driver uses native HiveQL queries,or converts them into an equivalent form in HiveQL. + */ + useNativeQuery?: any; + /** + * The user name that you use to access Hive Server. + */ + username?: any; + /** + * The password corresponding to the user name that you provided in the Username field + */ + password?: SecretBaseUnion; + /** + * The partial URL corresponding to the Hive server. + */ + httpPath?: any; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. 
+ */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Hubspot Service linked service. + */ +export type HubspotLinkedService = LinkedService & { + /** + * The client ID associated with your Hubspot application. + */ + clientId: any; + /** + * The client secret associated with your Hubspot application. + */ + clientSecret?: SecretBaseUnion; + /** + * The access token obtained when initially authenticating your OAuth integration. + */ + accessToken?: SecretBaseUnion; + /** + * The refresh token obtained when initially authenticating your OAuth integration. + */ + refreshToken?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Impala server linked service. 
+ */ +export type ImpalaLinkedService = LinkedService & { + /** + * The IP address or host name of the Impala server. (i.e. 192.168.222.160) + */ + host: any; + /** + * The TCP port that the Impala server uses to listen for client connections. The default value is 21050. + */ + port?: any; + /** + * The authentication type to use. + */ + authenticationType: ImpalaAuthenticationType; + /** + * The user name used to access the Impala server. The default value is anonymous when using SASLUsername. + */ + username?: any; + /** + * The password corresponding to the user name when using UsernameAndPassword. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Jira Service linked service. + */ +export type JiraLinkedService = LinkedService & { + /** + * The IP address or host name of the Jira service. (e.g. 
jira.example.com) + */ + host: any; + /** + * The TCP port that the Jira server uses to listen for client connections. The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + */ + port?: any; + /** + * The user name that you use to access Jira Service. + */ + username: any; + /** + * The password corresponding to the user name that you provided in the username field. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Magento server linked service. + */ +export type MagentoLinkedService = LinkedService & { + /** + * The URL of the Magento instance. (i.e. 192.168.222.110/magento3) + */ + host: any; + /** + * The access token from Magento. + */ + accessToken?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. 
+ */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * MariaDB server linked service. + */ +export type MariaDBLinkedService = LinkedService & { + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Database for MariaDB linked service. + */ +export type AzureMariaDBLinkedService = LinkedService & { + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Marketo server linked service. + */ +export type MarketoLinkedService = LinkedService & { + /** + * The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com) + */ + endpoint: any; + /** + * The client Id of your Marketo service. + */ + clientId: any; + /** + * The client secret of your Marketo service. + */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
+ */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Paypal Service linked service. + */ +export type PaypalLinkedService = LinkedService & { + /** + * The URL of the PayPal instance. (i.e. api.sandbox.paypal.com) + */ + host: any; + /** + * The client ID associated with your PayPal application. + */ + clientId: any; + /** + * The client secret associated with your PayPal application. + */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Phoenix server linked service. + */ +export type PhoenixLinkedService = LinkedService & { + /** + * The IP address or host name of the Phoenix server. (i.e. 
192.168.222.160) + */ + host: any; + /** + * The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. + */ + port?: any; + /** + * The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. + */ + httpPath?: any; + /** + * The authentication mechanism used to connect to the Phoenix server. + */ + authenticationType: PhoenixAuthenticationType; + /** + * The user name used to connect to the Phoenix server. + */ + username?: any; + /** + * The password corresponding to the user name. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Presto server linked service. 
+ */ +export type PrestoLinkedService = LinkedService & { + /** + * The IP address or host name of the Presto server. (i.e. 192.168.222.160) + */ + host: any; + /** + * The version of the Presto server. (i.e. 0.148-t) + */ + serverVersion: any; + /** + * The catalog context for all request against the server. + */ + catalog: any; + /** + * The TCP port that the Presto server uses to listen for client connections. The default value is 8080. + */ + port?: any; + /** + * The authentication mechanism used to connect to the Presto server. + */ + authenticationType: PrestoAuthenticationType; + /** + * The user name used to connect to the Presto server. + */ + username?: any; + /** + * The password corresponding to the user name. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. + */ + timeZoneID?: any; + /** + * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * QuickBooks server linked service. + */ +export type QuickBooksLinkedService = LinkedService & { + /** + * The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) + */ + endpoint: any; + /** + * The company ID of the QuickBooks company to authorize. + */ + companyId: any; + /** + * The consumer key for OAuth 1.0 authentication. + */ + consumerKey: any; + /** + * The consumer secret for OAuth 1.0 authentication. + */ + consumerSecret: SecretBaseUnion; + /** + * The access token for OAuth 1.0 authentication. + */ + accessToken: SecretBaseUnion; + /** + * The access token secret for OAuth 1.0 authentication. + */ + accessTokenSecret: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * ServiceNow server linked service. + */ +export type ServiceNowLinkedService = LinkedService & { + /** + * The endpoint of the ServiceNow server. (i.e. .service-now.com) + */ + endpoint: any; + /** + * The authentication type to use. + */ + authenticationType: ServiceNowAuthenticationType; + /** + * The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. + */ + username?: any; + /** + * The password corresponding to the user name for Basic and OAuth2 authentication. + */ + password?: SecretBaseUnion; + /** + * The client id for OAuth2 authentication. + */ + clientId?: any; + /** + * The client secret for OAuth2 authentication. 
+ */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Shopify Service linked service. + */ +export type ShopifyLinkedService = LinkedService & { + /** + * The endpoint of the Shopify server. (i.e. mystore.myshopify.com) + */ + host: any; + /** + * The API access token that can be used to access Shopify’s data. The token won't expire if it is offline mode. + */ + accessToken?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Spark Server linked service. 
+ */ +export type SparkLinkedService = LinkedService & { + /** + * IP address or host name of the Spark server + */ + host: any; + /** + * The TCP port that the Spark server uses to listen for client connections. + */ + port: any; + /** + * The type of Spark server. + */ + serverType?: SparkServerType; + /** + * The transport protocol to use in the Thrift layer. + */ + thriftTransportProtocol?: SparkThriftTransportProtocol; + /** + * The authentication method used to access the Spark server. + */ + authenticationType: SparkAuthenticationType; + /** + * The user name that you use to access Spark Server. + */ + username?: any; + /** + * The password corresponding to the user name that you provided in the Username field + */ + password?: SecretBaseUnion; + /** + * The partial URL corresponding to the Spark server. + */ + httpPath?: any; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
+ */ + encryptedCredential?: any; +}; + +/** + * Square Service linked service. + */ +export type SquareLinkedService = LinkedService & { + /** + * The URL of the Square instance. (i.e. mystore.mysquare.com) + */ + host: any; + /** + * The client ID associated with your Square application. + */ + clientId: any; + /** + * The client secret associated with your Square application. + */ + clientSecret?: SecretBaseUnion; + /** + * The redirect URL assigned in the Square application dashboard. (i.e. http://localhost:2500) + */ + redirectUri: any; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Xero Service linked service. + */ +export type XeroLinkedService = LinkedService & { + /** + * The endpoint of the Xero server. (i.e. api.xero.com) + */ + host: any; + /** + * The consumer key associated with the Xero application. + */ + consumerKey?: SecretBaseUnion; + /** + * The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings( + * ). + */ + privateKey?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. 
+ */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Zoho server linked service. + */ +export type ZohoLinkedService = LinkedService & { + /** + * The endpoint of the Zoho server. (i.e. crm.zoho.com/crm/private) + */ + endpoint: any; + /** + * The access token for Zoho authentication. + */ + accessToken?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Vertica linked service. + */ +export type VerticaLinkedService = LinkedService & { + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. 
+ */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Netezza linked service. + */ +export type NetezzaLinkedService = LinkedService & { + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Salesforce Marketing Cloud linked service. + */ +export type SalesforceMarketingCloudLinkedService = LinkedService & { + /** + * The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). + */ + clientId: any; + /** + * The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). + */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). 
+ */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * HDInsight ondemand linked service. + */ +export type HDInsightOnDemandLinkedService = LinkedService & { + /** + * Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). + */ + clusterSize: any; + /** + * The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 mins. Type: string (or Expression with resultType string). + */ + timeToLive: any; + /** + * Version of the HDInsight cluster.  Type: string (or Expression with resultType string). + */ + version: any; + /** + * Azure Storage linked service to be used by the on-demand cluster for storing and processing data. + */ + linkedServiceName: LinkedServiceReference; + /** + * The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). + */ + hostSubscriptionId: any; + /** + * The service principal id for the hostSubscriptionId. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key for the service principal id. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The Tenant id/name to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant: any; + /** + * The resource group where the cluster belongs. Type: string (or Expression with resultType string). + */ + clusterResourceGroup: any; + /** + * The prefix of cluster name, postfix will be distinct with timestamp. Type: string (or Expression with resultType string). 
+ */ + clusterNamePrefix?: any; + /** + * The username to access the cluster. Type: string (or Expression with resultType string). + */ + clusterUserName?: any; + /** + * The password to access the cluster. + */ + clusterPassword?: SecretBaseUnion; + /** + * The username to SSH remotely connect to cluster’s node (for Linux). Type: string (or Expression with resultType string). + */ + clusterSshUserName?: any; + /** + * The password to SSH remotely connect cluster’s node (for Linux). + */ + clusterSshPassword?: SecretBaseUnion; + /** + * Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. + */ + additionalLinkedServiceNames?: LinkedServiceReference[]; + /** + * The name of Azure SQL linked service that point to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. + */ + hcatalogLinkedServiceName?: LinkedServiceReference; + /** + * The cluster type. Type: string (or Expression with resultType string). + */ + clusterType?: any; + /** + * The version of spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). + */ + sparkVersion?: any; + /** + * Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. + */ + coreConfiguration?: any; + /** + * Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. + */ + hBaseConfiguration?: any; + /** + * Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. + */ + hdfsConfiguration?: any; + /** + * Specifies the hive configuration parameters (hive-site.xml) for the HDInsight cluster. + */ + hiveConfiguration?: any; + /** + * Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. 
+ */ + mapReduceConfiguration?: any; + /** + * Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. + */ + oozieConfiguration?: any; + /** + * Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. + */ + stormConfiguration?: any; + /** + * Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. + */ + yarnConfiguration?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * Specifies the size of the head node for the HDInsight cluster. + */ + headNodeSize?: any; + /** + * Specifies the size of the data node for the HDInsight cluster. + */ + dataNodeSize?: any; + /** + * Specifies the size of the Zoo Keeper node for the HDInsight cluster. + */ + zookeeperNodeSize?: any; + /** + * Custom script actions to run on HDI ondemand cluster once it's up. Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + */ + scriptActions?: ScriptAction[]; + /** + * The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). + */ + virtualNetworkId?: any; + /** + * The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). + */ + subnetName?: any; +}; + +/** + * Azure Data Lake Analytics linked service. + */ +export type AzureDataLakeAnalyticsLinkedService = LinkedService & { + /** + * The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). 
+ */ + accountName: any; + /** + * The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The Key of the application used to authenticate against the Azure Data Lake Analytics account. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant: any; + /** + * Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). + */ + subscriptionId?: any; + /** + * Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). + */ + resourceGroupName?: any; + /** + * Azure Data Lake Analytics URI Type: string (or Expression with resultType string). + */ + dataLakeAnalyticsUri?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Databricks linked service. + */ +export type AzureDatabricksLinkedService = LinkedService & { + /** + * .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). + */ + domain: any; + /** + * Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). + */ + accessToken: SecretBaseUnion; + /** + * The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). + */ + existingClusterId?: any; + /** + * The id of an existing instance pool that will be used for all runs of this activity. 
Type: string (or Expression with resultType string). + */ + instancePoolId?: any; + /** + * If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string). + */ + newClusterVersion?: any; + /** + * If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). + */ + newClusterNumOfWorker?: any; + /** + * The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). + */ + newClusterNodeType?: any; + /** + * A set of optional, user-specified Spark configuration key-value pairs. + */ + newClusterSparkConf?: { [propertyName: string]: any }; + /** + * A set of optional, user-specified Spark environment variables key-value pairs. + */ + newClusterSparkEnvVars?: { [propertyName: string]: any }; + /** + * Additional tags for cluster resources. This property is ignored in instance pool configurations. + */ + newClusterCustomTags?: { [propertyName: string]: any }; + /** + * The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). + */ + newClusterDriverNodeType?: any; + /** + * User-defined initialization scripts for the new cluster. 
Type: array of strings (or Expression with resultType array of strings). + */ + newClusterInitScripts?: any; + /** + * Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). + */ + newClusterEnableElasticDisk?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Responsys linked service. + */ +export type ResponsysLinkedService = LinkedService & { + /** + * The endpoint of the Responsys server. + */ + endpoint: any; + /** + * The client ID associated with the Responsys application. Type: string (or Expression with resultType string). + */ + clientId: any; + /** + * The client secret associated with the Responsys application. Type: string (or Expression with resultType string). + */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
+ */ + encryptedCredential?: any; +}; + +/** + * Dynamics AX linked service. + */ +export type DynamicsAXLinkedService = LinkedService & { + /** + * The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. + */ + url: any; + /** + * Specify the application's client ID. Type: string (or Expression with resultType string). + */ + servicePrincipalId: any; + /** + * Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). + */ + servicePrincipalKey: SecretBaseUnion; + /** + * Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse in the top-right corner of the Azure portal. Type: string (or Expression with resultType string). + */ + tenant: any; + /** + * Specify the resource you are requesting authorization. Type: string (or Expression with resultType string). + */ + aadResourceId: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Oracle Service Cloud linked service. + */ +export type OracleServiceCloudLinkedService = LinkedService & { + /** + * The URL of the Oracle Service Cloud instance. + */ + host: any; + /** + * The user name that you use to access Oracle Service Cloud server. + */ + username: any; + /** + * The password corresponding to the user name that you provided in the username key. + */ + password: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). 
+ */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Google AdWords service linked service. + */ +export type GoogleAdWordsLinkedService = LinkedService & { + /** + * The Client customer ID of the AdWords account that you want to fetch report data for. + */ + clientCustomerID: any; + /** + * The developer token associated with the manager account that you use to grant access to the AdWords API. + */ + developerToken: SecretBaseUnion; + /** + * The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. + */ + authenticationType: GoogleAdWordsAuthenticationType; + /** + * The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. + */ + refreshToken?: SecretBaseUnion; + /** + * The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). + */ + clientId?: any; + /** + * The client secret of the google application used to acquire the refresh token. + */ + clientSecret?: SecretBaseUnion; + /** + * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. 
+ */ + email?: any; + /** + * The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. + */ + keyFilePath?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * SAP Table Linked Service. + */ +export type SapTableLinkedService = LinkedService & { + /** + * Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). + */ + server?: any; + /** + * System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). + */ + systemNumber?: any; + /** + * Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). + */ + clientId?: any; + /** + * Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). + */ + language?: any; + /** + * SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). + */ + systemId?: any; + /** + * Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). 
+ */ + userName?: any; + /** + * Password to access the SAP server where the table is located. + */ + password?: SecretBaseUnion; + /** + * The hostname of the SAP Message Server. Type: string (or Expression with resultType string). + */ + messageServer?: any; + /** + * The service name or port number of the Message Server. Type: string (or Expression with resultType string). + */ + messageServerService?: any; + /** + * SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). + */ + sncMode?: any; + /** + * Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). + */ + sncMyName?: any; + /** + * Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). + */ + sncPartnerName?: any; + /** + * External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). + */ + sncLibraryPath?: any; + /** + * SNC Quality of Protection. Allowed value include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). + */ + sncQop?: any; + /** + * The Logon Group for the SAP System. Type: string (or Expression with resultType string). + */ + logonGroup?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Data Explorer (Kusto) linked service. + */ +export type AzureDataExplorerLinkedService = LinkedService & { + /** + * The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format https://..kusto.windows.net. 
Type: string (or Expression with resultType string) + */ + endpoint: any; + /** + * The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). + */ + servicePrincipalId: any; + /** + * The key of the service principal used to authenticate against Kusto. + */ + servicePrincipalKey: SecretBaseUnion; + /** + * Database name for connection. Type: string (or Expression with resultType string). + */ + database: any; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant: any; +}; + +/** + * Azure Function linked service. + */ +export type AzureFunctionLinkedService = LinkedService & { + /** + * The endpoint of the Azure Function App. URL will be in the format https://.azurewebsites.net. + */ + functionAppUrl: any; + /** + * Function or Host key for Azure Function App. + */ + functionKey?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * The resource model definition for an Azure Resource Manager resource with an etag. + */ +export type AzureEntityResource = Resource & { + /** + * Resource Etag. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly etag?: string; +}; + +/** + * A private endpoint connection + */ +export type PrivateEndpointConnection = Resource & { + /** + * The private endpoint which the connection belongs to. + */ + privateEndpoint?: PrivateEndpoint; + /** + * Connection state of the private endpoint connection. + */ + privateLinkServiceConnectionState?: PrivateLinkServiceConnectionState; + /** + * Provisioning state of the private endpoint connection. + * NOTE: This property will not be serialized. It can only be populated by the server. 
+ */ + readonly provisioningState?: string; +}; + +/** + * The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location' + */ +export type TrackedResource = Resource & { + /** + * Resource tags. + */ + tags?: { [propertyName: string]: string }; + /** + * The geo-location where the resource lives + */ + location: string; +}; + +/** + * The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location + */ +export type ProxyResource = Resource & {}; + +/** + * Avro dataset. + */ +export type AvroDataset = Dataset & { + /** + * The location of the avro storage. + */ + location?: DatasetLocationUnion; + avroCompressionCodec?: AvroCompressionCodec; + avroCompressionLevel?: number; +}; + +/** + * Parquet dataset. + */ +export type ParquetDataset = Dataset & { + /** + * The location of the parquet storage. + */ + location?: DatasetLocationUnion; + compressionCodec?: ParquetCompressionCodec; +}; + +/** + * Delimited text dataset. + */ +export type DelimitedTextDataset = Dataset & { + /** + * The location of the delimited text storage. + */ + location?: DatasetLocationUnion; + /** + * The column delimiter. Type: string (or Expression with resultType string). + */ + columnDelimiter?: any; + /** + * The row delimiter. Type: string (or Expression with resultType string). + */ + rowDelimiter?: any; + /** + * The code page name of the preferred encoding. If miss, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). + */ + encodingName?: any; + compressionCodec?: DelimitedTextCompressionCodec; + /** + * The data compression method used for DelimitedText. + */ + compressionLevel?: DatasetCompressionLevel; + /** + * The quote character. 
Type: string (or Expression with resultType string). + */ + quoteChar?: any; + /** + * The escape character. Type: string (or Expression with resultType string). + */ + escapeChar?: any; + /** + * When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). + */ + firstRowAsHeader?: any; + /** + * The null value string. Type: string (or Expression with resultType string). + */ + nullValue?: any; +}; + +/** + * Json dataset. + */ +export type JsonDataset = Dataset & { + /** + * The location of the json data storage. + */ + location?: DatasetLocationUnion; + /** + * The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). + */ + encodingName?: any; + /** + * The data compression method used for the json dataset. + */ + compression?: DatasetCompressionUnion; +}; + +/** + * ORC dataset. + */ +export type OrcDataset = Dataset & { + /** + * The location of the ORC data storage. + */ + location?: DatasetLocationUnion; + orcCompressionCodec?: OrcCompressionCodec; +}; + +/** + * Binary dataset. + */ +export type BinaryDataset = Dataset & { + /** + * The location of the Binary storage. + */ + location?: DatasetLocationUnion; + /** + * The data compression method used for the binary dataset. + */ + compression?: DatasetCompressionUnion; +}; + +/** + * The Azure Table storage dataset. + */ +export type AzureTableDataset = Dataset & { + /** + * The table name of the Azure Table storage. Type: string (or Expression with resultType string). + */ + tableName: any; +}; + +/** + * The Azure SQL Server database dataset. 
+ */ +export type AzureSqlTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the Azure SQL database. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the Azure SQL database. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Azure SQL Managed Instance dataset. + */ +export type AzureSqlMITableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Azure SQL Data Warehouse dataset. + */ +export type AzureSqlDWTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Cassandra database dataset. + */ +export type CassandraTableDataset = Dataset & { + /** + * The table name of the Cassandra database. Type: string (or Expression with resultType string). + */ + tableName?: any; + /** + * The keyspace of the Cassandra database. Type: string (or Expression with resultType string). + */ + keyspace?: any; +}; + +/** + * The custom dataset. + */ +export type CustomDataset = Dataset & { + /** + * Custom dataset properties. 
+ */ + typeProperties?: any; +}; + +/** + * Microsoft Azure CosmosDB (SQL API) Collection dataset. + */ +export type CosmosDbSqlApiCollectionDataset = Dataset & { + /** + * CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). + */ + collectionName: any; +}; + +/** + * Microsoft Azure Document Database Collection dataset. + */ +export type DocumentDbCollectionDataset = Dataset & { + /** + * Document Database collection name. Type: string (or Expression with resultType string). + */ + collectionName: any; +}; + +/** + * The Dynamics entity dataset. + */ +export type DynamicsEntityDataset = Dataset & { + /** + * The logical name of the entity. Type: string (or Expression with resultType string). + */ + entityName?: any; +}; + +/** + * The Dynamics CRM entity dataset. + */ +export type DynamicsCrmEntityDataset = Dataset & { + /** + * The logical name of the entity. Type: string (or Expression with resultType string). + */ + entityName?: any; +}; + +/** + * The Common Data Service for Apps entity dataset. + */ +export type CommonDataServiceForAppsEntityDataset = Dataset & { + /** + * The logical name of the entity. Type: string (or Expression with resultType string). + */ + entityName?: any; +}; + +/** + * The Office365 account. + */ +export type Office365Dataset = Dataset & { + /** + * Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). + */ + tableName: any; + /** + * A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). + */ + predicate?: any; +}; + +/** + * The MongoDB database dataset. + */ +export type MongoDbCollectionDataset = Dataset & { + /** + * The table name of the MongoDB database. Type: string (or Expression with resultType string). + */ + collectionName: any; +}; + +/** + * The MongoDB database dataset. 
+ */ +export type MongoDbV2CollectionDataset = Dataset & { + /** + * The collection name of the MongoDB database. Type: string (or Expression with resultType string). + */ + collection: any; +}; + +/** + * The CosmosDB (MongoDB API) database dataset. + */ +export type CosmosDbMongoDbApiCollectionDataset = Dataset & { + /** + * The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). + */ + collection: any; +}; + +/** + * The Open Data Protocol (OData) resource dataset. + */ +export type ODataResourceDataset = Dataset & { + /** + * The OData resource path. Type: string (or Expression with resultType string). + */ + path?: any; +}; + +/** + * The on-premises Oracle database dataset. + */ +export type OracleTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Teradata database dataset. + */ +export type TeradataTableDataset = Dataset & { + /** + * The database name of Teradata. Type: string (or Expression with resultType string). + */ + database?: any; + /** + * The table name of Teradata. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Azure MySQL database dataset. + */ +export type AzureMySqlTableDataset = Dataset & { + /** + * The Azure MySQL database table name. Type: string (or Expression with resultType string). + */ + tableName?: any; + /** + * The name of Azure MySQL database table. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Amazon Redshift table dataset. 
+ */ +export type AmazonRedshiftTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The Amazon Redshift table name. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The Amazon Redshift schema name. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * The Db2 table dataset. + */ +export type Db2TableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The Db2 schema name. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The Db2 table name. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The relational table dataset. + */ +export type RelationalTableDataset = Dataset & { + /** + * The relational table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The Informix table dataset. + */ +export type InformixTableDataset = Dataset & { + /** + * The Informix table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The ODBC table dataset. + */ +export type OdbcTableDataset = Dataset & { + /** + * The ODBC table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The MySQL table dataset. + */ +export type MySqlTableDataset = Dataset & { + /** + * The MySQL table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The PostgreSQL table dataset. + */ +export type PostgreSqlTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The PostgreSQL table name. Type: string (or Expression with resultType string). 
+ */ + table?: any; + /** + * The PostgreSQL schema name. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * The Microsoft Access table dataset. + */ +export type MicrosoftAccessTableDataset = Dataset & { + /** + * The Microsoft Access table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The Salesforce object dataset. + */ +export type SalesforceObjectDataset = Dataset & { + /** + * The Salesforce object API name. Type: string (or Expression with resultType string). + */ + objectApiName?: any; +}; + +/** + * The Salesforce Service Cloud object dataset. + */ +export type SalesforceServiceCloudObjectDataset = Dataset & { + /** + * The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). + */ + objectApiName?: any; +}; + +/** + * The Sybase table dataset. + */ +export type SybaseTableDataset = Dataset & { + /** + * The Sybase table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The SAP BW cube dataset. + */ +export type SapBwCubeDataset = Dataset & {}; + +/** + * The path of the SAP Cloud for Customer OData entity. + */ +export type SapCloudForCustomerResourceDataset = Dataset & { + /** + * The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). + */ + path: any; +}; + +/** + * The path of the SAP ECC OData entity. + */ +export type SapEccResourceDataset = Dataset & { + /** + * The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). + */ + path: any; +}; + +/** + * SAP HANA Table properties. + */ +export type SapHanaTableDataset = Dataset & { + /** + * The schema name of SAP HANA. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of SAP HANA. Type: string (or Expression with resultType string). 
+ */ + table?: any; +}; + +/** + * Sap Business Warehouse Open Hub Destination Table properties. + */ +export type SapOpenHubTableDataset = Dataset & { + /** + * The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). + */ + openHubDestinationName: any; + /** + * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + excludeLastRequest?: any; + /** + * The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). + */ + baseRequestId?: any; +}; + +/** + * The on-premises SQL Server dataset. + */ +export type SqlServerTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the SQL Server dataset. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * A Rest service dataset. + */ +export type RestResourceDataset = Dataset & { + /** + * The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). + */ + relativeUrl?: any; + /** + * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). + */ + requestMethod?: any; + /** + * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). + */ + requestBody?: any; + /** + * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). 
+ */ + additionalHeaders?: any; + /** + * The pagination rules to compose next page requests. Type: string (or Expression with resultType string). + */ + paginationRules?: any; +}; + +/** + * SAP Table Resource properties. + */ +export type SapTableResourceDataset = Dataset & { + /** + * The name of the SAP Table. Type: string (or Expression with resultType string). + */ + tableName: any; +}; + +/** + * The dataset points to a HTML table in the web page. + */ +export type WebTableDataset = Dataset & { + /** + * The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. + */ + index: any; + /** + * The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). + */ + path?: any; +}; + +/** + * The Azure Search Index. + */ +export type AzureSearchIndexDataset = Dataset & { + /** + * The name of the Azure Search Index. Type: string (or Expression with resultType string). + */ + indexName: any; +}; + +/** + * Amazon Marketplace Web Service dataset. + */ +export type AmazonMWSObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Azure PostgreSQL dataset. + */ +export type AzurePostgreSqlTableDataset = Dataset & { + /** + * The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). + */ + tableName?: any; + /** + * The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Concur Service dataset. + */ +export type ConcurObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). 
+ */ + tableName?: any; +}; + +/** + * Couchbase server dataset. + */ +export type CouchbaseTableDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Drill server dataset. + */ +export type DrillTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Drill. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Drill. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Eloqua server dataset. + */ +export type EloquaObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Google BigQuery service dataset. + */ +export type GoogleBigQueryObjectDataset = Dataset & { + /** + * This property will be retired. Please consider using database + table properties instead. + */ + tableName?: any; + /** + * The table name of the Google BigQuery. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The database name of the Google BigQuery. Type: string (or Expression with resultType string). + */ + dataset?: any; +}; + +/** + * Greenplum Database dataset. + */ +export type GreenplumTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of Greenplum. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of Greenplum. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * HBase server dataset. + */ +export type HBaseObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). 
+ */ + tableName?: any; +}; + +/** + * Hive Server dataset. + */ +export type HiveObjectDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Hive. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Hive. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Hubspot Service dataset. + */ +export type HubspotObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Impala server dataset. + */ +export type ImpalaObjectDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Impala. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Impala. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Jira Service dataset. + */ +export type JiraObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Magento server dataset. + */ +export type MagentoObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * MariaDB server dataset. + */ +export type MariaDBTableDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Azure Database for MariaDB dataset. + */ +export type AzureMariaDBTableDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Marketo server dataset. 
+ */ +export type MarketoObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Paypal Service dataset. + */ +export type PaypalObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Phoenix server dataset. + */ +export type PhoenixObjectDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Phoenix. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Phoenix. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Presto server dataset. + */ +export type PrestoObjectDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Presto. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Presto. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * QuickBooks server dataset. + */ +export type QuickBooksObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * ServiceNow server dataset. + */ +export type ServiceNowObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Shopify Service dataset. + */ +export type ShopifyObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Spark Server dataset. 
+ */ +export type SparkObjectDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Spark. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Spark. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Square Service dataset. + */ +export type SquareObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Xero Service dataset. + */ +export type XeroObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Zoho server dataset. + */ +export type ZohoObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Netezza dataset. + */ +export type NetezzaTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Netezza. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Netezza. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Vertica dataset. + */ +export type VerticaTableDataset = Dataset & { + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Vertica. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Vertica. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Salesforce Marketing Cloud dataset. 
+ */ +export type SalesforceMarketingCloudObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Responsys dataset. + */ +export type ResponsysObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The path of the Dynamics AX OData entity. + */ +export type DynamicsAXResourceDataset = Dataset & { + /** + * The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). + */ + path: any; +}; + +/** + * Oracle Service Cloud dataset. + */ +export type OracleServiceCloudObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The Azure Data Explorer (Kusto) dataset. + */ +export type AzureDataExplorerTableDataset = Dataset & { + /** + * The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * Google AdWords service dataset. + */ +export type GoogleAdWordsObjectDataset = Dataset & { + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Base class for all control activities like IfCondition, ForEach , Until. + */ +export type ControlActivity = Activity & {}; + +/** + * Base class for all execution activities. + */ +export type ExecutionActivity = Activity & { + /** + * Linked service reference. + */ + linkedServiceName?: LinkedServiceReference; + /** + * Activity policy. + */ + policy?: ActivityPolicy; +}; + +/** + * Execute pipeline activity. + */ +export type ExecutePipelineActivity = Activity & { + /** + * Pipeline reference. + */ + pipeline: PipelineReference; + /** + * Pipeline parameters. 
+ */ + parameters?: { [propertyName: string]: any }; + /** + * Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. + */ + waitOnCompletion?: boolean; +}; + +/** + * This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. + */ +export type IfConditionActivity = Activity & { + /** + * An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed. + */ + expression: Expression; + /** + * List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. + */ + ifTrueActivities?: ActivityUnion[]; + /** + * List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. + */ + ifFalseActivities?: ActivityUnion[]; +}; + +/** + * This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. + */ +export type SwitchActivity = Activity & { + /** + * An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. + */ + on: Expression; + /** + * List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. + */ + cases?: SwitchCase[]; + /** + * List of activities to execute if no case condition is satisfied. This is an optional property and if not provided, the activity will exit without any action. 
+ */
+ defaultActivities?: ActivityUnion[];
+};
+
+/**
+ * This activity is used for iterating over a collection and executing given activities.
+ */
+export type ForEachActivity = Activity & {
+ /**
+ * Should the loop be executed in sequence or in parallel (max 50)
+ */
+ isSequential?: boolean;
+ /**
+ * Batch count to be used for controlling the number of parallel execution (when isSequential is set to false).
+ */
+ batchCount?: number;
+ /**
+ * Collection to iterate.
+ */
+ items: Expression;
+ /**
+ * List of activities to execute.
+ */
+ activities: ActivityUnion[];
+};
+
+/**
+ * This activity suspends pipeline execution for the specified interval.
+ */
+export type WaitActivity = Activity & {
+ /**
+ * Duration in seconds.
+ */
+ waitTimeInSeconds: number;
+};
+
+/**
+ * This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier.
+ */
+export type UntilActivity = Activity & {
+ /**
+ * An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true
+ */
+ expression: Expression;
+ /**
+ * Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ timeout?: any;
+ /**
+ * List of activities to execute.
+ */
+ activities: ActivityUnion[];
+};
+
+/**
+ * This activity verifies that an external resource exists.
+ */
+export type ValidationActivity = Activity & {
+ /**
+ * Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. 
Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + timeout?: any; + /** + * A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). + */ + sleep?: any; + /** + * Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). + */ + minimumSize?: any; + /** + * Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). + */ + childItems?: any; + /** + * Validation activity dataset reference. + */ + dataset: DatasetReference; +}; + +/** + * Filter and return results from input array based on the conditions. + */ +export type FilterActivity = Activity & { + /** + * Input array on which filter should be applied. + */ + items: Expression; + /** + * Condition to be used for filtering the input. + */ + condition: Expression; +}; + +/** + * Set value for a Variable. + */ +export type SetVariableActivity = Activity & { + /** + * Name of the variable whose value needs to be set. + */ + variableName?: string; + /** + * Value to be set. Could be a static value or Expression + */ + value?: any; +}; + +/** + * Append value for a Variable of type Array. + */ +export type AppendVariableActivity = Activity & { + /** + * Name of the variable whose value needs to be appended to. + */ + variableName?: string; + /** + * Value to be appended. Could be a static value or Expression + */ + value?: any; +}; + +/** + * WebHook activity. + */ +export type WebHookActivity = Activity & { + /** + * Rest API method for target endpoint. + */ + method: WebHookActivityMethod; + /** + * WebHook activity target endpoint and path. Type: string (or Expression with resultType string). 
+ */ + url: any; + /** + * The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + timeout?: string; + /** + * Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). + */ + headers?: any; + /** + * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). + */ + body?: any; + /** + * Authentication method used for calling the endpoint. + */ + authentication?: WebActivityAuthentication; + /** + * When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). + */ + reportStatusOnCallBack?: any; +}; + +/** + * Execute Synapse notebook activity. + */ +export type SynapseNotebookActivity = Activity & { + /** + * Synapse notebook reference. + */ + notebook: SynapseNotebookReference; + /** + * Notebook parameters. + */ + parameters?: { [propertyName: string]: any }; +}; + +/** + * Execute spark job activity. + */ +export type SynapseSparkJobDefinitionActivity = Activity & { + /** + * Synapse spark job reference. + */ + sparkJob: SynapseSparkJobReference; +}; + +/** + * Execute SQL pool stored procedure activity. + */ +export type SqlPoolStoredProcedureActivity = Activity & { + /** + * SQL pool stored procedure reference. + */ + sqlPool: SqlPoolReference; + /** + * Stored procedure name. Type: string (or Expression with resultType string). + */ + storedProcedureName: any; + /** + * Value and type setting for stored procedure parameters. 
Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; +}; + +/** + * Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. + */ +export type RerunTumblingWindowTrigger = Trigger & { + /** + * The parent trigger reference. + */ + parentTrigger?: any; + /** + * The start time for the time period for which restatement is initiated. Only UTC time is currently supported. + */ + requestedStartTime: Date; + /** + * The end time for the time period for which restatement is initiated. Only UTC time is currently supported. + */ + requestedEndTime: Date; + /** + * The max number of parallel time windows (ready for execution) for which a rerun is triggered. + */ + maxConcurrency: number; +}; + +/** + * Base class for all triggers that support one to many model for trigger to pipeline. + */ +export type MultiplePipelineTrigger = Trigger & { + /** + * Pipelines that need to be started. + */ + pipelines?: TriggerPipelineReference[]; +}; + +/** + * Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). + */ +export type TumblingWindowTrigger = Trigger & { + /** + * Pipeline for which runs are created when an event is fired for trigger window that is ready. + */ + pipeline: TriggerPipelineReference; + /** + * The frequency of the time windows. + */ + frequency: TumblingWindowFrequency; + /** + * The interval of the time windows. The minimum interval allowed is 15 Minutes. + */ + interval: number; + /** + * The start time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. + */ + startTime: Date; + /** + * The end time for the time period for the trigger during which events are fired for windows that are ready. 
Only UTC time is currently supported. + */ + endTime?: Date; + /** + * Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + delay?: any; + /** + * The max number of parallel time windows (ready for execution) for which a new run is triggered. + */ + maxConcurrency: number; + /** + * Retry policy that will be applied for failed pipeline runs. + */ + retryPolicy?: RetryPolicy; + /** + * Triggers that this trigger depends on. Only tumbling window triggers are supported. + */ + dependsOn?: DependencyReferenceUnion[]; +}; + +/** + * Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. + */ +export type ChainingTrigger = Trigger & { + /** + * Pipeline for which runs are created when all upstream pipelines complete successfully. + */ + pipeline: TriggerPipelineReference; + /** + * Upstream Pipelines. + */ + dependsOn: PipelineReference[]; + /** + * Run Dimension property that needs to be emitted by upstream pipelines. + */ + runDimension: string; +}; + +/** + * Mapping data flow. + */ +export type MappingDataFlow = DataFlow & { + /** + * List of sources in data flow. + */ + sources?: DataFlowSource[]; + /** + * List of sinks in data flow. + */ + sinks?: DataFlowSink[]; + /** + * List of transformations in data flow. + */ + transformations?: Transformation[]; + /** + * DataFlow script. + */ + script?: string; +}; + +/** + * Data flow debug resource. + */ +export type DataFlowDebugResource = SubResourceDebugResource & { + /** + * Data flow properties. 
+ */ + properties: DataFlowUnion; +}; + +/** + * Dataset debug resource. + */ +export type DatasetDebugResource = SubResourceDebugResource & { + /** + * Dataset properties. + */ + properties: DatasetUnion; +}; + +/** + * Linked service debug resource. + */ +export type LinkedServiceDebugResource = SubResourceDebugResource & { + /** + * Properties of linked service. + */ + properties: LinkedServiceUnion; +}; + +/** + * Managed integration runtime, including managed elastic and managed dedicated integration runtimes. + */ +export type ManagedIntegrationRuntime = IntegrationRuntime & { + /** + * Integration runtime state, only valid for managed dedicated integration runtime. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly state?: IntegrationRuntimeState; + /** + * The compute resource for managed integration runtime. + */ + computeProperties?: IntegrationRuntimeComputeProperties; + /** + * SSIS properties for managed integration runtime. + */ + ssisProperties?: IntegrationRuntimeSsisProperties; +}; + +/** + * Self-hosted integration runtime. + */ +export type SelfHostedIntegrationRuntime = IntegrationRuntime & { + /** + * Linked integration runtime type from data factory + */ + linkedInfo?: LinkedIntegrationRuntimeTypeUnion; +}; + +/** + * Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. + */ +export type SecureString = SecretBase & { + /** + * Value of secure string. + */ + value: string; +}; + +/** + * Azure Key Vault secret reference. + */ +export type AzureKeyVaultSecretReference = SecretBase & { + /** + * The Azure Key Vault linked service reference. + */ + store: LinkedServiceReference; + /** + * The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). + */ + secretName: any; + /** + * The version of the secret in Azure Key Vault. The default value is the latest version of the secret. 
Type: string (or Expression with resultType string). + */ + secretVersion?: any; +}; + +/** + * Transformation for data flow source. + */ +export type DataFlowSource = Transformation & { + /** + * Dataset reference. + */ + dataset?: DatasetReference; +}; + +/** + * Transformation for data flow sink. + */ +export type DataFlowSink = Transformation & { + /** + * Dataset reference. + */ + dataset?: DatasetReference; +}; + +/** + * The location of azure blob dataset. + */ +export type AzureBlobStorageLocation = DatasetLocation & { + /** + * Specify the container of azure blob. Type: string (or Expression with resultType string). + */ + container?: any; +}; + +/** + * The location of azure blobFS dataset. + */ +export type AzureBlobFSLocation = DatasetLocation & { + /** + * Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). + */ + fileSystem?: any; +}; + +/** + * The location of azure data lake store dataset. + */ +export type AzureDataLakeStoreLocation = DatasetLocation & {}; + +/** + * The location of amazon S3 dataset. + */ +export type AmazonS3Location = DatasetLocation & { + /** + * Specify the bucketName of amazon S3. Type: string (or Expression with resultType string) + */ + bucketName?: any; + /** + * Specify the version of amazon S3. Type: string (or Expression with resultType string). + */ + version?: any; +}; + +/** + * The location of file server dataset. + */ +export type FileServerLocation = DatasetLocation & {}; + +/** + * The location of file server dataset. + */ +export type AzureFileStorageLocation = DatasetLocation & {}; + +/** + * The location of Google Cloud Storage dataset. + */ +export type GoogleCloudStorageLocation = DatasetLocation & { + /** + * Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string) + */ + bucketName?: any; + /** + * Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). 
+ */
+ version?: any;
+};
+
+/**
+ * The location of ftp server dataset.
+ */
+export type FtpServerLocation = DatasetLocation & {};
+
+/**
+ * The location of SFTP dataset.
+ */
+export type SftpLocation = DatasetLocation & {};
+
+/**
+ * The location of http server.
+ */
+export type HttpServerLocation = DatasetLocation & {
+ /**
+ * Specify the relativeUrl of http server. Type: string (or Expression with resultType string)
+ */
+ relativeUrl?: any;
+};
+
+/**
+ * The location of HDFS.
+ */
+export type HdfsLocation = DatasetLocation & {};
+
+/**
+ * The data stored in text format.
+ */
+export type TextFormat = DatasetStorageFormat & {
+ /**
+ * The column delimiter. Type: string (or Expression with resultType string).
+ */
+ columnDelimiter?: any;
+ /**
+ * The row delimiter. Type: string (or Expression with resultType string).
+ */
+ rowDelimiter?: any;
+ /**
+ * The escape character. Type: string (or Expression with resultType string).
+ */
+ escapeChar?: any;
+ /**
+ * The quote character. Type: string (or Expression with resultType string).
+ */
+ quoteChar?: any;
+ /**
+ * The null value string. Type: string (or Expression with resultType string).
+ */
+ nullValue?: any;
+ /**
+ * The code page name of the preferred encoding. If not specified, the default value is "utf-8", unless BOM denotes another Unicode encoding. Refer to the "Name" column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string).
+ */
+ encodingName?: any;
+ /**
+ * Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean).
+ */
+ treatEmptyAsNull?: any;
+ /**
+ * The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). 
+ */ + skipLineCount?: any; + /** + * When used as input, treat the first row of data as headers. When used as output,write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). + */ + firstRowAsHeader?: any; +}; + +/** + * The data stored in JSON format. + */ +export type JsonFormat = DatasetStorageFormat & { + /** + * File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + */ + filePattern?: JsonFormatFilePattern; + /** + * The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). + */ + nestingSeparator?: any; + /** + * The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). + */ + encodingName?: any; + /** + * The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with resultType string). + */ + jsonNodeReference?: any; + /** + * The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). + */ + jsonPathDefinition?: any; +}; + +/** + * The data stored in Avro format. + */ +export type AvroFormat = DatasetStorageFormat & {}; + +/** + * The data stored in Optimized Row Columnar (ORC) format. 
+ */ +export type OrcFormat = DatasetStorageFormat & {}; + +/** + * The data stored in Parquet format. + */ +export type ParquetFormat = DatasetStorageFormat & {}; + +/** + * The BZip2 compression method used on a dataset. + */ +export type DatasetBZip2Compression = DatasetCompression & {}; + +/** + * The GZip compression method used on a dataset. + */ +export type DatasetGZipCompression = DatasetCompression & { + /** + * The GZip compression level. + */ + level?: DatasetCompressionLevel; +}; + +/** + * The Deflate compression method used on a dataset. + */ +export type DatasetDeflateCompression = DatasetCompression & { + /** + * The Deflate compression level. + */ + level?: DatasetCompressionLevel; +}; + +/** + * The ZipDeflate compression method used on a dataset. + */ +export type DatasetZipDeflateCompression = DatasetCompression & { + /** + * The ZipDeflate compression level. + */ + level?: DatasetCompressionLevel; +}; + +/** + * A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. + */ +export type WebAnonymousAuthentication = WebLinkedServiceTypeProperties & {}; + +/** + * A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. + */ +export type WebBasicAuthentication = WebLinkedServiceTypeProperties & { + /** + * User name for Basic authentication. Type: string (or Expression with resultType string). + */ + username: any; + /** + * The password for Basic authentication. + */ + password: SecretBaseUnion; +}; + +/** + * A WebLinkedService that uses client certificate based authentication to communicate with an HTTP endpoint. This scheme follows mutual authentication; the server must also provide valid credentials to the client. + */ +export type WebClientCertificateAuthentication = WebLinkedServiceTypeProperties & { + /** + * Base64-encoded contents of a PFX file. + */ + pfx: SecretBaseUnion; + /** + * Password for the PFX file. 
+ */ + password: SecretBaseUnion; +}; + +/** + * Azure blob read settings. + */ +export type AzureBlobStorageReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Azure blob wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). + */ + prefix?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Azure blobFS read settings. + */ +export type AzureBlobFSReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). 
+ */ + modifiedDatetimeEnd?: any; +}; + +/** + * Azure data lake store read settings. + */ +export type AzureDataLakeStoreReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * ADLS wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * ADLS wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Azure data lake store read settings. + */ +export type AmazonS3ReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * The prefix filter for the S3 object name. Type: string (or Expression with resultType string). + */ + prefix?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). 
+ */ + modifiedDatetimeEnd?: any; +}; + +/** + * File server read settings. + */ +export type FileServerReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * FileServer wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * FileServer wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Azure File Storage read settings. + */ +export type AzureFileStorageReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Google Cloud Storage read settings. 
+ */ +export type GoogleCloudStorageReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). + */ + prefix?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Ftp read settings. + */ +export type FtpReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Ftp wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Ftp wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Specify whether to use binary transfer mode for FTP stores. + */ + useBinaryTransfer?: boolean; +}; + +/** + * Sftp read settings. + */ +export type SftpReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Sftp wildcardFolderPath. Type: string (or Expression with resultType string). 
+ */ + wildcardFolderPath?: any; + /** + * Sftp wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Sftp read settings. + */ +export type HttpReadSettings = StoreReadSettings & { + /** + * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). + */ + requestMethod?: any; + /** + * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). + */ + requestBody?: any; + /** + * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). + */ + additionalHeaders?: any; + /** + * Specifies the timeout for a HTTP client to get HTTP response from HTTP server. + */ + requestTimeout?: any; +}; + +/** + * HDFS read settings. + */ +export type HdfsReadSettings = StoreReadSettings & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * HDFS wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * HDFS wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). 
+ */ + modifiedDatetimeEnd?: any; + /** + * Specifies Distcp-related settings. + */ + distcpSettings?: DistcpSettings; +}; + +/** + * Sftp write settings. + */ +export type SftpWriteSettings = StoreWriteSettings & { + /** + * Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). + */ + operationTimeout?: any; +}; + +/** + * Azure blob write settings. + */ +export type AzureBlobStorageWriteSettings = StoreWriteSettings & { + /** + * Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). + */ + blockSizeInMB?: any; +}; + +/** + * Azure blobFS write settings. + */ +export type AzureBlobFSWriteSettings = StoreWriteSettings & { + /** + * Indicates the block size(MB) when writing data to blob. Type: integer (or Expression with resultType integer). + */ + blockSizeInMB?: any; +}; + +/** + * Azure data lake store write settings. + */ +export type AzureDataLakeStoreWriteSettings = StoreWriteSettings & {}; + +/** + * File server write settings. + */ +export type FileServerWriteSettings = StoreWriteSettings & {}; + +/** + * Delimited text read settings. + */ +export type DelimitedTextReadSettings = FormatReadSettings & { + /** + * Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). + */ + skipLineCount?: any; +}; + +/** + * Avro write settings. + */ +export type AvroWriteSettings = FormatWriteSettings & { + /** + * Top level record name in write result, which is required in AVRO spec. + */ + recordName?: string; + /** + * Record namespace in the write result. + */ + recordNamespace?: string; +}; + +/** + * Delimited text write settings. + */ +export type DelimitedTextWriteSettings = FormatWriteSettings & { + /** + * Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). 
+ */ + quoteAllText?: any; + /** + * The file extension used to create the files. Type: string (or Expression with resultType string). + */ + fileExtension: any; +}; + +/** + * Json write settings. + */ +export type JsonWriteSettings = FormatWriteSettings & { + /** + * File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. + */ + filePattern?: JsonWriteFilePattern; +}; + +/** + * A copy activity Avro source. + */ +export type AvroSource = CopySource & { + /** + * Avro store settings. + */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * A copy activity Parquet source. + */ +export type ParquetSource = CopySource & { + /** + * Parquet store settings. + */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * A copy activity DelimitedText source. + */ +export type DelimitedTextSource = CopySource & { + /** + * DelimitedText store settings. + */ + storeSettings?: StoreReadSettingsUnion; + /** + * DelimitedText format settings. + */ + formatSettings?: DelimitedTextReadSettings; +}; + +/** + * A copy activity Json source. + */ +export type JsonSource = CopySource & { + /** + * Json store settings. + */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * A copy activity ORC source. + */ +export type OrcSource = CopySource & { + /** + * ORC store settings. + */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * A copy activity Binary source. + */ +export type BinarySource = CopySource & { + /** + * Binary store settings. + */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * Copy activity sources of tabular type. + */ +export type TabularSource = CopySource & { + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; +}; + +/** + * A copy activity Azure Blob source. 
+ */ +export type BlobSource = CopySource & { + /** + * Treat empty as null. Type: boolean (or Expression with resultType boolean). + */ + treatEmptyAsNull?: any; + /** + * Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). + */ + skipHeaderLineCount?: any; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; +}; + +/** + * A copy activity Document Database Collection source. + */ +export type DocumentDbCollectionSource = CopySource & { + /** + * Documents query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * Nested properties separator. Type: string (or Expression with resultType string). + */ + nestingSeparator?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; +}; + +/** + * A copy activity Azure CosmosDB (SQL API) Collection source. + */ +export type CosmosDbSqlApiSource = CopySource & { + /** + * SQL API query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * Page size of the result. Type: integer (or Expression with resultType integer). + */ + pageSize?: any; + /** + * Preferred regions. Type: array of strings (or Expression with resultType array of strings). + */ + preferredRegions?: any; +}; + +/** + * A copy activity Dynamics source. + */ +export type DynamicsSource = CopySource & { + /** + * FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Dynamics CRM source. + */ +export type DynamicsCrmSource = CopySource & { + /** + * FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). 
Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Common Data Service for Apps source. + */ +export type CommonDataServiceForAppsSource = CopySource & { + /** + * FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for various relational databases. + */ +export type RelationalSource = CopySource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for Microsoft Access. + */ +export type MicrosoftAccessSource = CopySource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for OData source. + */ +export type ODataSource = CopySource & { + /** + * OData query. For example, "$top=1". Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Salesforce Service Cloud source. + */ +export type SalesforceServiceCloudSource = CopySource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The read behavior for the operation. Default is Query. + */ + readBehavior?: SalesforceSourceReadBehavior; +}; + +/** + * A copy activity Rest service source. + */ +export type RestSource = CopySource & { + /** + * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). + */ + requestMethod?: any; + /** + * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). + */ + requestBody?: any; + /** + * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). 
+ */ + additionalHeaders?: any; + /** + * The pagination rules to compose next page requests. Type: string (or Expression with resultType string). + */ + paginationRules?: any; + /** + * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + httpRequestTimeout?: any; + /** + * The time to await before sending next page request. + */ + requestInterval?: any; +}; + +/** + * A copy activity file system source. + */ +export type FileSystemSource = CopySource & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; +}; + +/** + * A copy activity HDFS source. + */ +export type HdfsSource = CopySource & { + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Specifies Distcp-related settings. + */ + distcpSettings?: DistcpSettings; +}; + +/** + * A copy activity Azure Data Explorer (Kusto) source. + */ +export type AzureDataExplorerSource = CopySource & { + /** + * Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). + */ + query: any; + /** + * The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. + */ + noTruncation?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. + */ + queryTimeout?: any; +}; + +/** + * A copy activity Oracle source. + */ +export type OracleSource = CopySource & { + /** + * Oracle reader query. Type: string (or Expression with resultType string). 
+ */ + oracleReaderQuery?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; + /** + * The partition mechanism that will be used for Oracle read in parallel. + */ + partitionOption?: OraclePartitionOption; + /** + * The settings that will be leveraged for Oracle source partitioning. + */ + partitionSettings?: OraclePartitionSettings; +}; + +/** + * A copy activity source for web page table. + */ +export type WebSource = CopySource & {}; + +/** + * A copy activity source for a MongoDB database. + */ +export type MongoDbSource = CopySource & { + /** + * Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for a MongoDB database. + */ +export type MongoDbV2Source = CopySource & { + /** + * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). + */ + filter?: any; + /** + * Cursor methods for Mongodb query + */ + cursorMethods?: MongoDbCursorMethodsProperties; + /** + * Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). + */ + batchSize?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; +}; + +/** + * A copy activity source for a CosmosDB (MongoDB API) database. + */ +export type CosmosDbMongoDbApiSource = CopySource & { + /** + * Specifies selection filter using query operators. 
To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). + */ + filter?: any; + /** + * Cursor methods for Mongodb query. + */ + cursorMethods?: MongoDbCursorMethodsProperties; + /** + * Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. Type: integer (or Expression with resultType integer). + */ + batchSize?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; +}; + +/** + * A copy activity source for an Office 365 service. + */ +export type Office365Source = CopySource & { + /** + * The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). + */ + allowedGroups?: any; + /** + * The user scope uri. Type: string (or Expression with resultType string). + */ + userScopeFilterUri?: any; + /** + * The Column to apply the and . Type: string (or Expression with resultType string). + */ + dateFilterColumn?: any; + /** + * Start time of the requested range for this dataset. Type: string (or Expression with resultType string). + */ + startTime?: any; + /** + * End time of the requested range for this dataset. Type: string (or Expression with resultType string). + */ + endTime?: any; + /** + * The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] + */ + outputColumns?: any; +}; + +/** + * A copy activity Azure Data Lake source. + */ +export type AzureDataLakeStoreSource = CopySource & { + /** + * If true, files under the folder path will be read recursively. Default is true. 
Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; +}; + +/** + * A copy activity Azure BlobFS source. + */ +export type AzureBlobFSSource = CopySource & { + /** + * Treat empty as null. Type: boolean (or Expression with resultType boolean). + */ + treatEmptyAsNull?: any; + /** + * Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). + */ + skipHeaderLineCount?: any; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; +}; + +/** + * A copy activity source for an HTTP file. + */ +export type HttpSource = CopySource & { + /** + * Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + httpRequestTimeout?: any; +}; + +/** + * A copy activity DelimitedText sink. + */ +export type DelimitedTextSink = CopySink & { + /** + * DelimitedText store settings. + */ + storeSettings?: StoreWriteSettingsUnion; + /** + * DelimitedText format settings. + */ + formatSettings?: DelimitedTextWriteSettings; +}; + +/** + * A copy activity Json sink. + */ +export type JsonSink = CopySink & { + /** + * Json store settings. + */ + storeSettings?: StoreWriteSettingsUnion; + /** + * Json format settings. + */ + formatSettings?: JsonWriteSettings; +}; + +/** + * A copy activity ORC sink. + */ +export type OrcSink = CopySink & { + /** + * ORC store settings. + */ + storeSettings?: StoreWriteSettingsUnion; +}; + +/** + * A copy activity Azure PostgreSQL sink. + */ +export type AzurePostgreSqlSink = CopySink & { + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Azure MySql sink. 
+ */ +export type AzureMySqlSink = CopySink & { + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity SAP Cloud for Customer sink. + */ +export type SapCloudForCustomerSink = CopySink & { + /** + * The write behavior for the operation. Default is 'Insert'. + */ + writeBehavior?: SapCloudForCustomerSinkWriteBehavior; +}; + +/** + * A copy activity Azure Queue sink. + */ +export type AzureQueueSink = CopySink & {}; + +/** + * A copy activity Azure Table sink. + */ +export type AzureTableSink = CopySink & { + /** + * Azure Table default partition key value. Type: string (or Expression with resultType string). + */ + azureTableDefaultPartitionKeyValue?: any; + /** + * Azure Table partition key name. Type: string (or Expression with resultType string). + */ + azureTablePartitionKeyName?: any; + /** + * Azure Table row key name. Type: string (or Expression with resultType string). + */ + azureTableRowKeyName?: any; + /** + * Azure Table insert type. Type: string (or Expression with resultType string). + */ + azureTableInsertType?: any; +}; + +/** + * A copy activity Avro sink. + */ +export type AvroSink = CopySink & { + /** + * Avro store settings. + */ + storeSettings?: StoreWriteSettingsUnion; + /** + * Avro format settings. + */ + formatSettings?: AvroWriteSettings; +}; + +/** + * A copy activity Parquet sink. + */ +export type ParquetSink = CopySink & { + /** + * Parquet store settings. + */ + storeSettings?: StoreWriteSettingsUnion; +}; + +/** + * A copy activity Binary sink. + */ +export type BinarySink = CopySink & { + /** + * Binary store settings. + */ + storeSettings?: StoreWriteSettingsUnion; +}; + +/** + * A copy activity Azure Blob sink. + */ +export type BlobSink = CopySink & { + /** + * Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). 
+ */ + blobWriterOverwriteFiles?: any; + /** + * Blob writer date time format. Type: string (or Expression with resultType string). + */ + blobWriterDateTimeFormat?: any; + /** + * Blob writer add header. Type: boolean (or Expression with resultType boolean). + */ + blobWriterAddHeader?: any; + /** + * The type of copy behavior for copy sink. + */ + copyBehavior?: any; +}; + +/** + * A copy activity file system sink. + */ +export type FileSystemSink = CopySink & { + /** + * The type of copy behavior for copy sink. + */ + copyBehavior?: any; +}; + +/** + * A copy activity Document Database Collection sink. + */ +export type DocumentDbCollectionSink = CopySink & { + /** + * Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). + */ + nestingSeparator?: any; + /** + * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. + */ + writeBehavior?: any; +}; + +/** + * A copy activity Azure CosmosDB (SQL API) Collection sink. + */ +export type CosmosDbSqlApiSink = CopySink & { + /** + * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. + */ + writeBehavior?: any; +}; + +/** + * A copy activity SQL sink. + */ +export type SqlSink = CopySink & { + /** + * SQL writer stored procedure name. Type: string (or Expression with resultType string). + */ + sqlWriterStoredProcedureName?: any; + /** + * SQL writer table type. Type: string (or Expression with resultType string). + */ + sqlWriterTableType?: any; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * SQL stored procedure parameters. + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). 
+ */ + storedProcedureTableTypeParameterName?: any; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity SQL server sink. + */ +export type SqlServerSink = CopySink & { + /** + * SQL writer stored procedure name. Type: string (or Expression with resultType string). + */ + sqlWriterStoredProcedureName?: any; + /** + * SQL writer table type. Type: string (or Expression with resultType string). + */ + sqlWriterTableType?: any; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * SQL stored procedure parameters. + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). + */ + storedProcedureTableTypeParameterName?: any; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity Azure SQL sink. + */ +export type AzureSqlSink = CopySink & { + /** + * SQL writer stored procedure name. Type: string (or Expression with resultType string). + */ + sqlWriterStoredProcedureName?: any; + /** + * SQL writer table type. Type: string (or Expression with resultType string). + */ + sqlWriterTableType?: any; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * SQL stored procedure parameters. + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). 
+ */ + storedProcedureTableTypeParameterName?: any; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity Azure SQL Managed Instance sink. + */ +export type SqlMISink = CopySink & { + /** + * SQL writer stored procedure name. Type: string (or Expression with resultType string). + */ + sqlWriterStoredProcedureName?: any; + /** + * SQL writer table type. Type: string (or Expression with resultType string). + */ + sqlWriterTableType?: any; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * SQL stored procedure parameters. + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). + */ + storedProcedureTableTypeParameterName?: any; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity SQL Data Warehouse sink. + */ +export type SqlDWSink = CopySink & { + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * Indicates to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). + */ + allowPolyBase?: any; + /** + * Specifies PolyBase-related settings when allowPolyBase is true. + */ + polyBaseSettings?: PolybaseSettings; + /** + * Indicates to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). + */ + allowCopyCommand?: any; + /** + * Specifies Copy Command related settings when allowCopyCommand is true. 
+ */ + copyCommandSettings?: DWCopyCommandSettings; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity Oracle sink. + */ +export type OracleSink = CopySink & { + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Azure Data Lake Store sink. + */ +export type AzureDataLakeStoreSink = CopySink & { + /** + * The type of copy behavior for copy sink. + */ + copyBehavior?: any; + /** + * Single File Parallel. + */ + enableAdlsSingleFileParallel?: any; +}; + +/** + * A copy activity Azure Data Lake Storage Gen2 sink. + */ +export type AzureBlobFSSink = CopySink & { + /** + * The type of copy behavior for copy sink. + */ + copyBehavior?: any; +}; + +/** + * A copy activity Azure Search Index sink. + */ +export type AzureSearchIndexSink = CopySink & { + /** + * Specify the write behavior when upserting documents into Azure Search Index. + */ + writeBehavior?: AzureSearchIndexWriteBehaviorType; +}; + +/** + * A copy activity ODBC sink. + */ +export type OdbcSink = CopySink & { + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Informix sink. + */ +export type InformixSink = CopySink & { + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Microsoft Access sink. + */ +export type MicrosoftAccessSink = CopySink & { + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Dynamics sink. + */ +export type DynamicsSink = CopySink & { + /** + * The write behavior for the operation. 
+ */ + writeBehavior: DynamicsSinkWriteBehavior; + /** + * The flag indicating whether ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; + /** + * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). + */ + alternateKeyName?: any; +}; + +/** + * A copy activity Dynamics CRM sink. + */ +export type DynamicsCrmSink = CopySink & { + /** + * The write behavior for the operation. + */ + writeBehavior: DynamicsSinkWriteBehavior; + /** + * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; + /** + * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). + */ + alternateKeyName?: any; +}; + +/** + * A copy activity Common Data Service for Apps sink. + */ +export type CommonDataServiceForAppsSink = CopySink & { + /** + * The write behavior for the operation. + */ + writeBehavior: DynamicsSinkWriteBehavior; + /** + * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; + /** + * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). + */ + alternateKeyName?: any; +}; + +/** + * A copy activity Azure Data Explorer sink. + */ +export type AzureDataExplorerSink = CopySink & { + /** + * A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. + */ + ingestionMappingName?: any; + /** + * An explicit column mapping description provided in a json format. 
Type: string. + */ + ingestionMappingAsJson?: any; + /** + * If set to true, any aggregation will be skipped. Default is false. Type: boolean. + */ + flushImmediately?: any; +}; + +/** + * A copy activity Salesforce sink. + */ +export type SalesforceSink = CopySink & { + /** + * The write behavior for the operation. Default is Insert. + */ + writeBehavior?: SalesforceSinkWriteBehavior; + /** + * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). + */ + externalIdFieldName?: any; + /** + * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; +}; + +/** + * A copy activity Salesforce Service Cloud sink. + */ +export type SalesforceServiceCloudSink = CopySink & { + /** + * The write behavior for the operation. Default is Insert. + */ + writeBehavior?: SalesforceSinkWriteBehavior; + /** + * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). + */ + externalIdFieldName?: any; + /** + * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. 
If set it to true, it means ADF will leave the data in the destination object unchanged when doing upsert/update operation and insert defined default value when doing insert operation, versus ADF will update the data in the destination object to NULL when doing upsert/update operation and insert NULL value when doing insert operation. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; +}; + +/** + * A copy activity sink for a CosmosDB (MongoDB API) database. + */ +export type CosmosDbMongoDbApiSink = CopySink & { + /** + * Specifies whether the document with same key to be overwritten (upsert) rather than throw exception (insert). The default value is "insert". Type: string (or Expression with resultType string). Type: string (or Expression with resultType string). + */ + writeBehavior?: any; +}; + +/** + * A copy activity tabular translator. + */ +export type TabularTranslator = CopyTranslator & { + /** + * Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. + */ + columnMappings?: any; + /** + * The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. + */ + schemaMapping?: any; + /** + * The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). + */ + collectionReference?: any; + /** + * Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). + */ + mapComplexValuesToString?: any; + /** + * Column mappings with logical types. 
Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). + */ + mappings?: any; +}; + +/** + * Trigger referenced dependency. + */ +export type TriggerDependencyReference = DependencyReference & { + /** + * Referenced trigger. + */ + referenceTrigger: TriggerReference; +}; + +/** + * Self referenced tumbling window trigger dependency. + */ +export type SelfDependencyTumblingWindowTriggerReference = DependencyReference & { + /** + * Timespan applied to the start time of a tumbling window when evaluating dependency. + */ + offset: string; + /** + * The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. + */ + size?: string; +}; + +/** + * The key authorization type integration runtime. + */ +export type LinkedIntegrationRuntimeKeyAuthorization = LinkedIntegrationRuntimeType & { + /** + * The key used for authorization. + */ + key: SecureString; +}; + +/** + * The role based access control (RBAC) authorization type integration runtime. + */ +export type LinkedIntegrationRuntimeRbacAuthorization = LinkedIntegrationRuntimeType & { + /** + * The resource identifier of the integration runtime to be shared. + */ + resourceId: string; +}; + +/** + * Linked service resource type. + */ +export type LinkedServiceResource = AzureEntityResource & { + /** + * Properties of linked service. + */ + properties: LinkedServiceUnion; +}; + +/** + * Dataset resource type. + */ +export type DatasetResource = AzureEntityResource & { + /** + * Dataset properties. 
+ */ + properties: DatasetUnion; +}; + +/** + * Pipeline resource type. + */ +export type PipelineResource = AzureEntityResource & { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The description of the pipeline. + */ + description?: string; + /** + * List of activities in pipeline. + */ + activities?: ActivityUnion[]; + /** + * List of parameters for pipeline. + */ + parameters?: { [propertyName: string]: ParameterSpecification }; + /** + * List of variables for pipeline. + */ + variables?: { [propertyName: string]: VariableSpecification }; + /** + * The max number of concurrent runs for the pipeline. + */ + concurrency?: number; + /** + * List of tags that can be used for describing the Pipeline. + */ + annotations?: any[]; + /** + * Dimensions emitted by Pipeline. + */ + runDimensions?: { [propertyName: string]: any }; + /** + * The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. + */ + folder?: PipelineFolder; +}; + +/** + * Trigger resource type. + */ +export type TriggerResource = AzureEntityResource & { + /** + * Properties of the trigger. + */ + properties: TriggerUnion; +}; + +/** + * Data flow resource type. + */ +export type DataFlowResource = AzureEntityResource & { + /** + * Data flow properties. + */ + properties: DataFlowUnion; +}; + +/** + * Spark job definition resource type. + */ +export type SparkJobDefinitionResource = AzureEntityResource & { + /** + * Properties of spark job definition. + */ + properties: SparkJobDefinition; +}; + +/** + * Integration runtime resource type. + */ +export type IntegrationRuntimeResource = AzureEntityResource & { + /** + * Integration runtime properties. + */ + properties: IntegrationRuntimeUnion; +}; + +/** + * Azure Synapse nested resource, which belongs to a workspace. + */ +export type SubResource = AzureEntityResource & {}; + +/** + * RerunTrigger resource type. 
+ */ +export type RerunTriggerResource = AzureEntityResource & { + /** + * Properties of the rerun trigger. + */ + properties: RerunTumblingWindowTrigger; +}; + +/** + * A workspace + */ +export type Workspace = TrackedResource & { + /** + * Identity of the workspace + */ + identity?: ManagedIdentity; + /** + * Workspace default data lake storage account details + */ + defaultDataLakeStorage?: DataLakeStorageAccountDetails; + /** + * SQL administrator login password + */ + sqlAdministratorLoginPassword?: string; + /** + * Workspace managed resource group. The resource group name uniquely identifies the resource group within the user subscriptionId. The resource group name must be no longer than 90 characters long, and must be alphanumeric characters (Char.IsLetterOrDigit()) and '-', '_', '(', ')' and'.'. Note that the name cannot end with '.' + */ + managedResourceGroupName?: string; + /** + * Resource provisioning state + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly provisioningState?: string; + /** + * Login for workspace SQL active directory administrator + */ + sqlAdministratorLogin?: string; + /** + * Virtual Network profile + */ + virtualNetworkProfile?: VirtualNetworkProfile; + /** + * Connectivity endpoints + */ + connectivityEndpoints?: { [propertyName: string]: string }; + /** + * Setting this to 'default' will ensure that all compute for this workspace is in a virtual network managed on behalf of the user. + */ + managedVirtualNetwork?: string; + /** + * Private endpoint connections to the workspace + */ + privateEndpointConnections?: PrivateEndpointConnection[]; + /** + * The encryption details of the workspace + */ + encryption?: EncryptionDetails; + /** + * The workspace unique identifier + * NOTE: This property will not be serialized. It can only be populated by the server. 
+ */ + readonly workspaceUID?: string; + /** + * Workspace level configs and feature flags + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly extraProperties?: { [propertyName: string]: any }; + /** + * Managed Virtual Network Settings + */ + managedVirtualNetworkSettings?: ManagedVirtualNetworkSettings; + /** + * Git integration settings + */ + workspaceRepositoryConfiguration?: WorkspaceRepositoryConfiguration; + /** + * Purview Configuration + */ + purviewConfiguration?: PurviewConfiguration; +}; + +/** + * A SQL Analytics pool + */ +export type SqlPool = TrackedResource & { + /** + * SQL pool SKU + */ + sku?: Sku; + /** + * Maximum size in bytes + */ + maxSizeBytes?: number; + /** + * Collation mode + */ + collation?: string; + /** + * Source database to create from + */ + sourceDatabaseId?: string; + /** + * Backup database to restore from + */ + recoverableDatabaseId?: string; + /** + * Resource state + */ + provisioningState?: string; + /** + * Resource status + */ + status?: string; + /** + * Snapshot time to restore + */ + restorePointInTime?: string; + /** + * What is this? + */ + createMode?: string; + /** + * Date the SQL pool was created + */ + creationDate?: Date; +}; + +/** + * A Big Data pool + */ +export type BigDataPoolResourceInfo = TrackedResource & { + /** + * The state of the Big Data pool. + */ + provisioningState?: string; + /** + * Auto-scaling properties + */ + autoScale?: AutoScaleProperties; + /** + * The time when the Big Data pool was created. + */ + creationDate?: Date; + /** + * Auto-pausing properties + */ + autoPause?: AutoPauseProperties; + /** + * Whether compute isolation is required or not. + */ + isComputeIsolationEnabled?: boolean; + /** + * Whether library requirements changed. + */ + haveLibraryRequirementsChanged?: boolean; + /** + * Whether session level packages enabled. 
+ */ + sessionLevelPackagesEnabled?: boolean; + /** + * The Spark events folder + */ + sparkEventsFolder?: string; + /** + * The number of nodes in the Big Data pool. + */ + nodeCount?: number; + /** + * Library version requirements + */ + libraryRequirements?: LibraryRequirements; + /** + * Spark configuration file to specify additional properties + */ + sparkConfigProperties?: LibraryRequirements; + /** + * The Apache Spark version. + */ + sparkVersion?: string; + /** + * The default folder where Spark logs will be written. + */ + defaultSparkLogFolder?: string; + /** + * The level of compute power that each node in the Big Data pool has. + */ + nodeSize?: NodeSize; + /** + * The kind of nodes that the Big Data pool provides. + */ + nodeSizeFamily?: NodeSizeFamily; +}; + +/** + * Copy activity. + */ +export type CopyActivity = ExecutionActivity & { + /** + * List of inputs for the activity. + */ + inputs?: DatasetReference[]; + /** + * List of outputs for the activity. + */ + outputs?: DatasetReference[]; + /** + * Copy activity source. + */ + source: CopySourceUnion; + /** + * Copy activity sink. + */ + sink: CopySinkUnion; + /** + * Copy activity translator. If not specified, tabular translator is used. + */ + translator?: any; + /** + * Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableStaging?: any; + /** + * Specifies interim staging settings when EnableStaging is true. + */ + stagingSettings?: StagingSettings; + /** + * Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. + */ + parallelCopies?: any; + /** + * Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. + */ + dataIntegrationUnits?: any; + /** + * Whether to skip incompatible row. 
Default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableSkipIncompatibleRow?: any; + /** + * Redirect incompatible row settings when EnableSkipIncompatibleRow is true. + */ + redirectIncompatibleRowSettings?: RedirectIncompatibleRowSettings; + /** + * Preserve Rules. + */ + preserveRules?: any[]; + /** + * Preserve rules. + */ + preserve?: any[]; +}; + +/** + * HDInsight Hive activity type. + */ +export type HDInsightHiveActivity = ExecutionActivity & { + /** + * Storage linked service references. + */ + storageLinkedServices?: LinkedServiceReference[]; + /** + * User specified arguments to HDInsightActivity. + */ + arguments?: any[]; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * Script path. Type: string (or Expression with resultType string). + */ + scriptPath?: any; + /** + * Script linked service reference. + */ + scriptLinkedService?: LinkedServiceReference; + /** + * Allows user to specify defines for Hive job request. + */ + defines?: { [propertyName: string]: any }; + /** + * User specified arguments under hivevar namespace. + */ + variables?: any[]; + /** + * Query timeout value (in minutes). Effective when the HDInsight cluster is with ESP (Enterprise Security Package) + */ + queryTimeout?: number; +}; + +/** + * HDInsight Pig activity type. + */ +export type HDInsightPigActivity = ExecutionActivity & { + /** + * Storage linked service references. + */ + storageLinkedServices?: LinkedServiceReference[]; + /** + * User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). + */ + arguments?: any; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * Script path. Type: string (or Expression with resultType string). + */ + scriptPath?: any; + /** + * Script linked service reference. + */ + scriptLinkedService?: LinkedServiceReference; + /** + * Allows user to specify defines for Pig job request. 
+ */ + defines?: { [propertyName: string]: any }; +}; + +/** + * HDInsight MapReduce activity type. + */ +export type HDInsightMapReduceActivity = ExecutionActivity & { + /** + * Storage linked service references. + */ + storageLinkedServices?: LinkedServiceReference[]; + /** + * User specified arguments to HDInsightActivity. + */ + arguments?: any[]; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * Class name. Type: string (or Expression with resultType string). + */ + className: any; + /** + * Jar path. Type: string (or Expression with resultType string). + */ + jarFilePath: any; + /** + * Jar linked service reference. + */ + jarLinkedService?: LinkedServiceReference; + /** + * Jar libs. + */ + jarLibs?: any[]; + /** + * Allows user to specify defines for the MapReduce job request. + */ + defines?: { [propertyName: string]: any }; +}; + +/** + * HDInsight streaming activity type. + */ +export type HDInsightStreamingActivity = ExecutionActivity & { + /** + * Storage linked service references. + */ + storageLinkedServices?: LinkedServiceReference[]; + /** + * User specified arguments to HDInsightActivity. + */ + arguments?: any[]; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * Mapper executable name. Type: string (or Expression with resultType string). + */ + mapper: any; + /** + * Reducer executable name. Type: string (or Expression with resultType string). + */ + reducer: any; + /** + * Input blob path. Type: string (or Expression with resultType string). + */ + input: any; + /** + * Output blob path. Type: string (or Expression with resultType string). + */ + output: any; + /** + * Paths to streaming job files. Can be directories. + */ + filePaths: any[]; + /** + * Linked service reference where the files are located. + */ + fileLinkedService?: LinkedServiceReference; + /** + * Combiner executable name. Type: string (or Expression with resultType string). 
+ */ + combiner?: any; + /** + * Command line environment values. + */ + commandEnvironment?: any[]; + /** + * Allows user to specify defines for streaming job request. + */ + defines?: { [propertyName: string]: any }; +}; + +/** + * HDInsight Spark activity. + */ +export type HDInsightSparkActivity = ExecutionActivity & { + /** + * The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). + */ + rootPath: any; + /** + * The relative path to the root folder of the code/package to be executed. Type: string (or Expression with resultType string). + */ + entryFilePath: any; + /** + * The user-specified arguments to HDInsightSparkActivity. + */ + arguments?: any[]; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * The storage linked service for uploading the entry file and dependencies, and for receiving logs. + */ + sparkJobLinkedService?: LinkedServiceReference; + /** + * The application's Java/Spark main class. + */ + className?: string; + /** + * The user to impersonate that will execute the job. Type: string (or Expression with resultType string). + */ + proxyUser?: any; + /** + * Spark configuration property. + */ + sparkConfig?: { [propertyName: string]: any }; +}; + +/** + * Execute SSIS package activity. + */ +export type ExecuteSsisPackageActivity = ExecutionActivity & { + /** + * SSIS package location. + */ + packageLocation: SsisPackageLocation; + /** + * Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). + */ + runtime?: any; + /** + * The logging level of SSIS package execution. Type: string (or Expression with resultType string). + */ + loggingLevel?: any; + /** + * The environment path to execute the SSIS package. Type: string (or Expression with resultType string). + */ + environmentPath?: any; + /** + * The package execution credential. 
+ */ + executionCredential?: SsisExecutionCredential; + /** + * The integration runtime reference. + */ + connectVia: IntegrationRuntimeReference; + /** + * The project level parameters to execute the SSIS package. + */ + projectParameters?: { [propertyName: string]: SsisExecutionParameter }; + /** + * The package level parameters to execute the SSIS package. + */ + packageParameters?: { [propertyName: string]: SsisExecutionParameter }; + /** + * The project level connection managers to execute the SSIS package. + */ + projectConnectionManagers?: { [propertyName: string]: any }; + /** + * The package level connection managers to execute the SSIS package. + */ + packageConnectionManagers?: { [propertyName: string]: any }; + /** + * The property overrides to execute the SSIS package. + */ + propertyOverrides?: { [propertyName: string]: SsisPropertyOverride }; + /** + * SSIS package execution log location. + */ + logLocation?: SsisLogLocation; +}; + +/** + * Custom activity type. + */ +export type CustomActivity = ExecutionActivity & { + /** + * Command for custom activity Type: string (or Expression with resultType string). + */ + command: any; + /** + * Resource linked service reference. + */ + resourceLinkedService?: LinkedServiceReference; + /** + * Folder path for resource files Type: string (or Expression with resultType string). + */ + folderPath?: any; + /** + * Reference objects + */ + referenceObjects?: CustomActivityReferenceObject; + /** + * User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. + */ + extendedProperties?: { [propertyName: string]: any }; + /** + * The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). + */ + retentionTimeInDays?: any; +}; + +/** + * SQL stored procedure activity type. 
+ */ +export type SqlServerStoredProcedureActivity = ExecutionActivity & { + /** + * Stored procedure name. Type: string (or Expression with resultType string). + */ + storedProcedureName: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; +}; + +/** + * Delete activity. + */ +export type DeleteActivity = ExecutionActivity & { + /** + * If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * The max concurrent connections to connect data source at the same time. + */ + maxConcurrentConnections?: number; + /** + * Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableLogging?: any; + /** + * Log storage settings customer need to provide when enableLogging is true. + */ + logStorageSettings?: LogStorageSettings; + /** + * Delete activity dataset reference. + */ + dataset: DatasetReference; +}; + +/** + * Azure Data Explorer command activity. + */ +export type AzureDataExplorerCommandActivity = ExecutionActivity & { + /** + * A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). + */ + command: any; + /** + * Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) + */ + commandTimeout?: any; +}; + +/** + * Lookup activity. + */ +export type LookupActivity = ExecutionActivity & { + /** + * Dataset-specific source properties, same as copy activity source. + */ + source: CopySourceUnion; + /** + * Lookup activity dataset reference. + */ + dataset: DatasetReference; + /** + * Whether to return first row or all rows. 
Default value is true. Type: boolean (or Expression with resultType boolean). + */ + firstRowOnly?: any; +}; + +/** + * Web activity. + */ +export type WebActivity = ExecutionActivity & { + /** + * Rest API method for target endpoint. + */ + method: WebActivityMethod; + /** + * Web activity target endpoint and path. Type: string (or Expression with resultType string). + */ + url: any; + /** + * Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). + */ + headers?: any; + /** + * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). + */ + body?: any; + /** + * Authentication method used for calling the endpoint. + */ + authentication?: WebActivityAuthentication; + /** + * List of datasets passed to web endpoint. + */ + datasets?: DatasetReference[]; + /** + * List of linked services passed to web endpoint. + */ + linkedServices?: LinkedServiceReference[]; + /** + * The integration runtime reference. + */ + connectVia?: IntegrationRuntimeReference; +}; + +/** + * Activity to get metadata of dataset + */ +export type GetMetadataActivity = ExecutionActivity & { + /** + * GetMetadata activity dataset reference. + */ + dataset: DatasetReference; + /** + * Fields of metadata to get from dataset. + */ + fieldList?: any[]; +}; + +/** + * Azure ML Batch Execution activity. + */ +export type AzureMLBatchExecutionActivity = ExecutionActivity & { + /** + * Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. 
+ */ + globalParameters?: { [propertyName: string]: any }; + /** + * Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. + */ + webServiceOutputs?: { [propertyName: string]: AzureMLWebServiceFile }; + /** + * Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations.. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. + */ + webServiceInputs?: { [propertyName: string]: AzureMLWebServiceFile }; +}; + +/** + * Azure ML Update Resource management activity. + */ +export type AzureMLUpdateResourceActivity = ExecutionActivity & { + /** + * Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). + */ + trainedModelName: any; + /** + * Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. + */ + trainedModelLinkedServiceName: LinkedServiceReference; + /** + * The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). + */ + trainedModelFilePath: any; +}; + +/** + * Azure ML Execute Pipeline activity. + */ +export type AzureMLExecutePipelineActivity = ExecutionActivity & { + /** + * ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). + */ + mlPipelineId: any; + /** + * Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). 
+ */ + experimentName?: any; + /** + * Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). + */ + mlPipelineParameters?: any; + /** + * The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). + */ + mlParentRunId?: any; + /** + * Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). + */ + continueOnStepFailure?: any; +}; + +/** + * Data Lake Analytics U-SQL activity. + */ +export type DataLakeAnalyticsUsqlActivity = ExecutionActivity & { + /** + * Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). + */ + scriptPath: any; + /** + * Script linked service reference. + */ + scriptLinkedService: LinkedServiceReference; + /** + * The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. + */ + degreeOfParallelism?: any; + /** + * Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. + */ + priority?: any; + /** + * Parameters for U-SQL job request. + */ + parameters?: { [propertyName: string]: any }; + /** + * Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). 
+ */ + runtimeVersion?: any; + /** + * Compilation mode of U-SQL. Must be one of these values : Semantic, Full and SingleBox. Type: string (or Expression with resultType string). + */ + compilationMode?: any; +}; + +/** + * DatabricksNotebook activity. + */ +export type DatabricksNotebookActivity = ExecutionActivity & { + /** + * The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). + */ + notebookPath: any; + /** + * Base parameters to be used for each run of this job.If the notebook takes a parameter that is not specified, the default value from the notebook will be used. + */ + baseParameters?: { [propertyName: string]: any }; + /** + * A list of libraries to be installed on the cluster that will execute the job. + */ + libraries?: { [propertyName: string]: any }[]; +}; + +/** + * DatabricksSparkJar activity. + */ +export type DatabricksSparkJarActivity = ExecutionActivity & { + /** + * The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). + */ + mainClassName: any; + /** + * Parameters that will be passed to the main method. + */ + parameters?: any[]; + /** + * A list of libraries to be installed on the cluster that will execute the job. + */ + libraries?: { [propertyName: string]: any }[]; +}; + +/** + * DatabricksSparkPython activity. + */ +export type DatabricksSparkPythonActivity = ExecutionActivity & { + /** + * The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). + */ + pythonFile: any; + /** + * Command line parameters that will be passed to the Python file. + */ + parameters?: any[]; + /** + * A list of libraries to be installed on the cluster that will execute the job. 
+ */ + libraries?: { [propertyName: string]: any }[]; +}; + +/** + * Azure Function activity. + */ +export type AzureFunctionActivity = ExecutionActivity & { + /** + * Rest API method for target endpoint. + */ + method: AzureFunctionActivityMethod; + /** + * Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string) + */ + functionName: any; + /** + * Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). + */ + headers?: any; + /** + * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). + */ + body?: any; +}; + +/** + * Execute data flow activity. + */ +export type ExecuteDataFlowActivity = ExecutionActivity & { + /** + * Data flow reference. + */ + dataFlow: DataFlowReference; + /** + * Staging info for execute data flow activity. + */ + staging?: DataFlowStagingInfo; + /** + * The integration runtime reference. + */ + integrationRuntime?: IntegrationRuntimeReference; + /** + * Compute properties for data flow activity. + */ + compute?: ExecuteDataFlowActivityTypePropertiesCompute; +}; + +/** + * Trigger that creates pipeline runs periodically, on schedule. + */ +export type ScheduleTrigger = MultiplePipelineTrigger & { + /** + * Recurrence schedule configuration. + */ + recurrence: ScheduleTriggerRecurrence; +}; + +/** + * Trigger that runs every time the selected Blob container changes. + */ +export type BlobTrigger = MultiplePipelineTrigger & { + /** + * The path of the container/folder that will trigger the pipeline. + */ + folderPath: string; + /** + * The max number of parallel files to handle when it is triggered. + */ + maxConcurrency: number; + /** + * The Azure Storage linked service reference. 
+ */ + linkedService: LinkedServiceReference; +}; + +/** + * Trigger that runs every time a Blob event occurs. + */ +export type BlobEventsTrigger = MultiplePipelineTrigger & { + /** + * The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + */ + blobPathBeginsWith?: string; + /** + * The blob path must end with the pattern provided for trigger to fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + */ + blobPathEndsWith?: string; + /** + * If set to true, blobs with zero bytes will be ignored. + */ + ignoreEmptyBlobs?: boolean; + /** + * The type of events that cause this trigger to fire. + */ + events: BlobEventTypes[]; + /** + * The ARM resource ID of the Storage Account. + */ + scope: string; +}; + +/** + * A copy activity Azure Table source. + */ +export type AzureTableSource = TabularSource & { + /** + * Azure Table source query. Type: string (or Expression with resultType string). + */ + azureTableSourceQuery?: any; + /** + * Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). + */ + azureTableSourceIgnoreTableNotFound?: any; +}; + +/** + * A copy activity source for Informix. + */ +export type InformixSource = TabularSource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for Db2 databases. + */ +export type Db2Source = TabularSource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for ODBC databases. + */ +export type OdbcSource = TabularSource & { + /** + * Database query. 
Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for MySQL databases. + */ +export type MySqlSource = TabularSource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for PostgreSQL databases. + */ +export type PostgreSqlSource = TabularSource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for Sybase databases. + */ +export type SybaseSource = TabularSource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for SapBW server via MDX. + */ +export type SapBwSource = TabularSource & { + /** + * MDX query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Salesforce source. + */ +export type SalesforceSource = TabularSource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The read behavior for the operation. Default is Query. + */ + readBehavior?: SalesforceSourceReadBehavior; +}; + +/** + * A copy activity source for SAP Cloud for Customer source. + */ +export type SapCloudForCustomerSource = TabularSource & { + /** + * SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for SAP ECC source. + */ +export type SapEccSource = TabularSource & { + /** + * SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for SAP HANA source. + */ +export type SapHanaSource = TabularSource & { + /** + * SAP HANA Sql query. Type: string (or Expression with resultType string). 
+ */ + query?: any; + /** + * The packet size of data read from SAP HANA. Type: integer(or Expression with resultType integer). + */ + packetSize?: any; + /** + * The partition mechanism that will be used for SAP HANA read in parallel. + */ + partitionOption?: SapHanaPartitionOption; + /** + * The settings that will be leveraged for SAP HANA source partitioning. + */ + partitionSettings?: SapHanaPartitionSettings; +}; + +/** + * A copy activity source for SAP Business Warehouse Open Hub Destination source. + */ +export type SapOpenHubSource = TabularSource & { + /** + * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + excludeLastRequest?: any; + /** + * The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer ). + */ + baseRequestId?: any; +}; + +/** + * A copy activity source for SAP Table source. + */ +export type SapTableSource = TabularSource & { + /** + * The number of rows to be retrieved. Type: integer(or Expression with resultType integer). + */ + rowCount?: any; + /** + * The number of rows that will be skipped. Type: integer (or Expression with resultType integer). + */ + rowSkips?: any; + /** + * The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). + */ + rfcTableFields?: any; + /** + * The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). + */ + rfcTableOptions?: any; + /** + * Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). 
+ */ + batchSize?: any; + /** + * Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). + */ + customRfcReadTableFunctionModule?: any; + /** + * The partition mechanism that will be used for SAP table read in parallel. + */ + partitionOption?: SapTablePartitionOption; + /** + * The settings that will be leveraged for SAP table source partitioning. + */ + partitionSettings?: SapTablePartitionSettings; +}; + +/** + * A copy activity SQL source. + */ +export type SqlSource = TabularSource & { + /** + * SQL reader query. Type: string (or Expression with resultType string). + */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; +}; + +/** + * A copy activity SQL server source. + */ +export type SqlServerSource = TabularSource & { + /** + * SQL reader query. Type: string (or Expression with resultType string). + */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * Which additional types to produce. + */ + produceAdditionalTypes?: any; +}; + +/** + * A copy activity Azure SQL source. + */ +export type AzureSqlSource = TabularSource & { + /** + * SQL reader query. 
Type: string (or Expression with resultType string). + */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * Which additional types to produce. + */ + produceAdditionalTypes?: any; +}; + +/** + * A copy activity Azure SQL Managed Instance source. + */ +export type SqlMISource = TabularSource & { + /** + * SQL reader query. Type: string (or Expression with resultType string). + */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * Which additional types to produce. + */ + produceAdditionalTypes?: any; +}; + +/** + * A copy activity SQL Data Warehouse source. + */ +export type SqlDWSource = TabularSource & { + /** + * SQL Data Warehouse reader query. Type: string (or Expression with resultType string). + */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". 
Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. + */ + storedProcedureParameters?: any; +}; + +/** + * A copy activity Azure MySQL source. + */ +export type AzureMySqlSource = TabularSource & { + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Teradata source. + */ +export type TeradataSource = TabularSource & { + /** + * Teradata query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The partition mechanism that will be used for teradata read in parallel. + */ + partitionOption?: TeradataPartitionOption; + /** + * The settings that will be leveraged for teradata source partitioning. + */ + partitionSettings?: TeradataPartitionSettings; +}; + +/** + * A copy activity source for a Cassandra database. + */ +export type CassandraSource = TabularSource & { + /** + * Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + */ + consistencyLevel?: CassandraSourceReadConsistencyLevels; +}; + +/** + * A copy activity Amazon Marketplace Web Service source. + */ +export type AmazonMWSSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Azure PostgreSQL source. + */ +export type AzurePostgreSqlSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). 
+ */ + query?: any; +}; + +/** + * A copy activity Concur Service source. + */ +export type ConcurSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Couchbase server source. + */ +export type CouchbaseSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Drill server source. + */ +export type DrillSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Eloqua server source. + */ +export type EloquaSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Google BigQuery service source. + */ +export type GoogleBigQuerySource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Greenplum Database source. + */ +export type GreenplumSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity HBase server source. + */ +export type HBaseSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Hive Server source. + */ +export type HiveSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Hubspot Service source. + */ +export type HubspotSource = TabularSource & { + /** + * A query to retrieve data from source. 
Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Impala server source. + */ +export type ImpalaSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Jira Service source. + */ +export type JiraSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Magento server source. + */ +export type MagentoSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity MariaDB server source. + */ +export type MariaDBSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Azure MariaDB source. + */ +export type AzureMariaDBSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Marketo server source. + */ +export type MarketoSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Paypal Service source. + */ +export type PaypalSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Phoenix server source. + */ +export type PhoenixSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Presto server source. 
+ */ +export type PrestoSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity QuickBooks server source. + */ +export type QuickBooksSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity ServiceNow server source. + */ +export type ServiceNowSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Shopify Service source. + */ +export type ShopifySource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Spark Server source. + */ +export type SparkSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Square Service source. + */ +export type SquareSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Xero Service source. + */ +export type XeroSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Zoho server source. + */ +export type ZohoSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Netezza source. + */ +export type NetezzaSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). 
+ */ + query?: any; + /** + * The partition mechanism that will be used for Netezza read in parallel. + */ + partitionOption?: NetezzaPartitionOption; + /** + * The settings that will be leveraged for Netezza source partitioning. + */ + partitionSettings?: NetezzaPartitionSettings; +}; + +/** + * A copy activity Vertica source. + */ +export type VerticaSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Salesforce Marketing Cloud source. + */ +export type SalesforceMarketingCloudSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Responsys source. + */ +export type ResponsysSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Dynamics AX source. + */ +export type DynamicsAXSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Oracle Service Cloud source. + */ +export type OracleServiceCloudSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Google AdWords service source. + */ +export type GoogleAdWordsSource = TabularSource & { + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for Amazon Redshift Source. + */ +export type AmazonRedshiftSource = TabularSource & { + /** + * Database query. Type: string (or Expression with resultType string). 
+ */ + query?: any; + /** + * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. + */ + redshiftUnloadSettings?: RedshiftUnloadSettings; +}; + +/** + * Referenced tumbling window trigger dependency. + */ +export type TumblingWindowTriggerDependencyReference = TriggerDependencyReference & { + /** + * Timespan applied to the start time of a tumbling window when evaluating dependency. + */ + offset?: string; + /** + * The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. + */ + size?: string; +}; + +/** + * Defines headers for DataFlowDebugSession_createDataFlowDebugSession operation. + */ +export interface DataFlowDebugSessionCreateDataFlowDebugSessionHeaders { + /** + * URI to poll for asynchronous operation status. + */ + location?: string; +} + +/** + * Defines headers for DataFlowDebugSession_executeCommand operation. + */ +export interface DataFlowDebugSessionExecuteCommandHeaders { + /** + * URI to poll for asynchronous operation status. + */ + location?: string; +} + +/** + * Known values of {@link IntegrationRuntimeReferenceType} that the service accepts. + */ +export const enum KnownIntegrationRuntimeReferenceType { + IntegrationRuntimeReference = "IntegrationRuntimeReference" +} + +/** + * Defines values for IntegrationRuntimeReferenceType. \ + * {@link KnownIntegrationRuntimeReferenceType} can be used interchangeably with IntegrationRuntimeReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **IntegrationRuntimeReference** + */ +export type IntegrationRuntimeReferenceType = string; + +/** + * Known values of {@link ParameterType} that the service accepts. 
+ */ +export const enum KnownParameterType { + Object = "Object", + String = "String", + Int = "Int", + Float = "Float", + Bool = "Bool", + Array = "Array", + SecureString = "SecureString" +} + +/** + * Defines values for ParameterType. \ + * {@link KnownParameterType} can be used interchangeably with ParameterType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Object** \ + * **String** \ + * **Int** \ + * **Float** \ + * **Bool** \ + * **Array** \ + * **SecureString** + */ +export type ParameterType = string; + +/** + * Known values of {@link Type} that the service accepts. + */ +export const enum KnownType { + LinkedServiceReference = "LinkedServiceReference" +} + +/** + * Defines values for Type. \ + * {@link KnownType} can be used interchangeably with Type, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **LinkedServiceReference** + */ +export type Type = string; + +/** + * Known values of {@link DependencyCondition} that the service accepts. + */ +export const enum KnownDependencyCondition { + Succeeded = "Succeeded", + Failed = "Failed", + Skipped = "Skipped", + Completed = "Completed" +} + +/** + * Defines values for DependencyCondition. \ + * {@link KnownDependencyCondition} can be used interchangeably with DependencyCondition, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Succeeded** \ + * **Failed** \ + * **Skipped** \ + * **Completed** + */ +export type DependencyCondition = string; + +/** + * Known values of {@link VariableType} that the service accepts. + */ +export const enum KnownVariableType { + String = "String", + Bool = "Bool", + Boolean = "Boolean", + Array = "Array" +} + +/** + * Defines values for VariableType. 
\ + * {@link KnownVariableType} can be used interchangeably with VariableType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **String** \ + * **Bool** \ + * **Boolean** \ + * **Array** + */ +export type VariableType = string; + +/** + * Known values of {@link RunQueryFilterOperand} that the service accepts. + */ +export const enum KnownRunQueryFilterOperand { + PipelineName = "PipelineName", + Status = "Status", + RunStart = "RunStart", + RunEnd = "RunEnd", + ActivityName = "ActivityName", + ActivityRunStart = "ActivityRunStart", + ActivityRunEnd = "ActivityRunEnd", + ActivityType = "ActivityType", + TriggerName = "TriggerName", + TriggerRunTimestamp = "TriggerRunTimestamp", + RunGroupId = "RunGroupId", + LatestOnly = "LatestOnly" +} + +/** + * Defines values for RunQueryFilterOperand. \ + * {@link KnownRunQueryFilterOperand} can be used interchangeably with RunQueryFilterOperand, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **PipelineName** \ + * **Status** \ + * **RunStart** \ + * **RunEnd** \ + * **ActivityName** \ + * **ActivityRunStart** \ + * **ActivityRunEnd** \ + * **ActivityType** \ + * **TriggerName** \ + * **TriggerRunTimestamp** \ + * **RunGroupId** \ + * **LatestOnly** + */ +export type RunQueryFilterOperand = string; + +/** + * Known values of {@link RunQueryFilterOperator} that the service accepts. + */ +export const enum KnownRunQueryFilterOperator { + Equals = "Equals", + NotEquals = "NotEquals", + In = "In", + NotIn = "NotIn" +} + +/** + * Defines values for RunQueryFilterOperator. \ + * {@link KnownRunQueryFilterOperator} can be used interchangeably with RunQueryFilterOperator, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **Equals** \ + * **NotEquals** \ + * **In** \ + * **NotIn** + */ +export type RunQueryFilterOperator = string; + +/** + * Known values of {@link RunQueryOrderByField} that the service accepts. + */ +export const enum KnownRunQueryOrderByField { + RunStart = "RunStart", + RunEnd = "RunEnd", + PipelineName = "PipelineName", + Status = "Status", + ActivityName = "ActivityName", + ActivityRunStart = "ActivityRunStart", + ActivityRunEnd = "ActivityRunEnd", + TriggerName = "TriggerName", + TriggerRunTimestamp = "TriggerRunTimestamp" +} + +/** + * Defines values for RunQueryOrderByField. \ + * {@link KnownRunQueryOrderByField} can be used interchangeably with RunQueryOrderByField, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **RunStart** \ + * **RunEnd** \ + * **PipelineName** \ + * **Status** \ + * **ActivityName** \ + * **ActivityRunStart** \ + * **ActivityRunEnd** \ + * **TriggerName** \ + * **TriggerRunTimestamp** + */ +export type RunQueryOrderByField = string; + +/** + * Known values of {@link RunQueryOrder} that the service accepts. + */ +export const enum KnownRunQueryOrder { + ASC = "ASC", + Desc = "DESC" +} + +/** + * Defines values for RunQueryOrder. \ + * {@link KnownRunQueryOrder} can be used interchangeably with RunQueryOrder, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **ASC** \ + * **DESC** + */ +export type RunQueryOrder = string; + +/** + * Known values of {@link TriggerRuntimeState} that the service accepts. + */ +export const enum KnownTriggerRuntimeState { + Started = "Started", + Stopped = "Stopped", + Disabled = "Disabled" +} + +/** + * Defines values for TriggerRuntimeState. \ + * {@link KnownTriggerRuntimeState} can be used interchangeably with TriggerRuntimeState, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **Started** \ + * **Stopped** \ + * **Disabled** + */ +export type TriggerRuntimeState = string; + +/** + * Known values of {@link EventSubscriptionStatus} that the service accepts. + */ +export const enum KnownEventSubscriptionStatus { + Enabled = "Enabled", + Provisioning = "Provisioning", + Deprovisioning = "Deprovisioning", + Disabled = "Disabled", + Unknown = "Unknown" +} + +/** + * Defines values for EventSubscriptionStatus. \ + * {@link KnownEventSubscriptionStatus} can be used interchangeably with EventSubscriptionStatus, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Enabled** \ + * **Provisioning** \ + * **Deprovisioning** \ + * **Disabled** \ + * **Unknown** + */ +export type EventSubscriptionStatus = string; + +/** + * Known values of {@link TriggerRunStatus} that the service accepts. + */ +export const enum KnownTriggerRunStatus { + Succeeded = "Succeeded", + Failed = "Failed", + Inprogress = "Inprogress" +} + +/** + * Defines values for TriggerRunStatus. \ + * {@link KnownTriggerRunStatus} can be used interchangeably with TriggerRunStatus, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Succeeded** \ + * **Failed** \ + * **Inprogress** + */ +export type TriggerRunStatus = string; + +/** + * Known values of {@link SqlScriptType} that the service accepts. + */ +export const enum KnownSqlScriptType { + SqlQuery = "SqlQuery" +} + +/** + * Defines values for SqlScriptType. \ + * {@link KnownSqlScriptType} can be used interchangeably with SqlScriptType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SqlQuery** + */ +export type SqlScriptType = string; + +/** + * Known values of {@link SqlConnectionType} that the service accepts. 
+ */ +export const enum KnownSqlConnectionType { + SqlOnDemand = "SqlOnDemand", + SqlPool = "SqlPool" +} + +/** + * Defines values for SqlConnectionType. \ + * {@link KnownSqlConnectionType} can be used interchangeably with SqlConnectionType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SqlOnDemand** \ + * **SqlPool** + */ +export type SqlConnectionType = string; + +/** + * Known values of {@link BigDataPoolReferenceType} that the service accepts. + */ +export const enum KnownBigDataPoolReferenceType { + BigDataPoolReference = "BigDataPoolReference" +} + +/** + * Defines values for BigDataPoolReferenceType. \ + * {@link KnownBigDataPoolReferenceType} can be used interchangeably with BigDataPoolReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **BigDataPoolReference** + */ +export type BigDataPoolReferenceType = string; + +/** + * Known values of {@link SparkJobType} that the service accepts. + */ +export const enum KnownSparkJobType { + SparkBatch = "SparkBatch", + SparkSession = "SparkSession" +} + +/** + * Defines values for SparkJobType. \ + * {@link KnownSparkJobType} can be used interchangeably with SparkJobType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SparkBatch** \ + * **SparkSession** + */ +export type SparkJobType = string; + +/** + * Known values of {@link SparkBatchJobResultType} that the service accepts. + */ +export const enum KnownSparkBatchJobResultType { + Uncertain = "Uncertain", + Succeeded = "Succeeded", + Failed = "Failed", + Cancelled = "Cancelled" +} + +/** + * Defines values for SparkBatchJobResultType. \ + * {@link KnownSparkBatchJobResultType} can be used interchangeably with SparkBatchJobResultType, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **Uncertain** \ + * **Succeeded** \ + * **Failed** \ + * **Cancelled** + */ +export type SparkBatchJobResultType = string; + +/** + * Known values of {@link SchedulerCurrentState} that the service accepts. + */ +export const enum KnownSchedulerCurrentState { + Queued = "Queued", + Scheduled = "Scheduled", + Ended = "Ended" +} + +/** + * Defines values for SchedulerCurrentState. \ + * {@link KnownSchedulerCurrentState} can be used interchangeably with SchedulerCurrentState, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Queued** \ + * **Scheduled** \ + * **Ended** + */ +export type SchedulerCurrentState = string; + +/** + * Known values of {@link PluginCurrentState} that the service accepts. + */ +export const enum KnownPluginCurrentState { + Preparation = "Preparation", + ResourceAcquisition = "ResourceAcquisition", + Queued = "Queued", + Submission = "Submission", + Monitoring = "Monitoring", + Cleanup = "Cleanup", + Ended = "Ended" +} + +/** + * Defines values for PluginCurrentState. \ + * {@link KnownPluginCurrentState} can be used interchangeably with PluginCurrentState, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Preparation** \ + * **ResourceAcquisition** \ + * **Queued** \ + * **Submission** \ + * **Monitoring** \ + * **Cleanup** \ + * **Ended** + */ +export type PluginCurrentState = string; + +/** + * Known values of {@link SparkErrorSource} that the service accepts. + */ +export const enum KnownSparkErrorSource { + System = "System", + User = "User", + Unknown = "Unknown", + Dependency = "Dependency" +} + +/** + * Defines values for SparkErrorSource. \ + * {@link KnownSparkErrorSource} can be used interchangeably with SparkErrorSource, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **System** \ + * **User** \ + * **Unknown** \ + * **Dependency** + */ +export type SparkErrorSource = string; + +/** + * Known values of {@link CellOutputType} that the service accepts. + */ +export const enum KnownCellOutputType { + ExecuteResult = "execute_result", + DisplayData = "display_data", + Stream = "stream", + Error = "error" +} + +/** + * Defines values for CellOutputType. \ + * {@link KnownCellOutputType} can be used interchangeably with CellOutputType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **execute_result** \ + * **display_data** \ + * **stream** \ + * **error** + */ +export type CellOutputType = string; + +/** + * Known values of {@link NodeSize} that the service accepts. + */ +export const enum KnownNodeSize { + None = "None", + Small = "Small", + Medium = "Medium", + Large = "Large", + XLarge = "XLarge", + XXLarge = "XXLarge", + XXXLarge = "XXXLarge" +} + +/** + * Defines values for NodeSize. \ + * {@link KnownNodeSize} can be used interchangeably with NodeSize, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **Small** \ + * **Medium** \ + * **Large** \ + * **XLarge** \ + * **XXLarge** \ + * **XXXLarge** + */ +export type NodeSize = string; + +/** + * Known values of {@link NodeSizeFamily} that the service accepts. + */ +export const enum KnownNodeSizeFamily { + None = "None", + MemoryOptimized = "MemoryOptimized" +} + +/** + * Defines values for NodeSizeFamily. \ + * {@link KnownNodeSizeFamily} can be used interchangeably with NodeSizeFamily, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **MemoryOptimized** + */ +export type NodeSizeFamily = string; + +/** + * Known values of {@link IntegrationRuntimeType} that the service accepts. 
+ */ +export const enum KnownIntegrationRuntimeType { + Managed = "Managed", + SelfHosted = "SelfHosted" +} + +/** + * Defines values for IntegrationRuntimeType. \ + * {@link KnownIntegrationRuntimeType} can be used interchangeably with IntegrationRuntimeType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Managed** \ + * **SelfHosted** + */ +export type IntegrationRuntimeType = string; + +/** + * Known values of {@link ExpressionType} that the service accepts. + */ +export const enum KnownExpressionType { + Expression = "Expression" +} + +/** + * Defines values for ExpressionType. \ + * {@link KnownExpressionType} can be used interchangeably with ExpressionType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Expression** + */ +export type ExpressionType = string; + +/** + * Known values of {@link PipelineReferenceType} that the service accepts. + */ +export const enum KnownPipelineReferenceType { + PipelineReference = "PipelineReference" +} + +/** + * Defines values for PipelineReferenceType. \ + * {@link KnownPipelineReferenceType} can be used interchangeably with PipelineReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **PipelineReference** + */ +export type PipelineReferenceType = string; + +/** + * Known values of {@link DatasetReferenceType} that the service accepts. + */ +export const enum KnownDatasetReferenceType { + DatasetReference = "DatasetReference" +} + +/** + * Defines values for DatasetReferenceType. \ + * {@link KnownDatasetReferenceType} can be used interchangeably with DatasetReferenceType, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **DatasetReference** + */ +export type DatasetReferenceType = string; + +/** + * Known values of {@link DataFlowReferenceType} that the service accepts. + */ +export const enum KnownDataFlowReferenceType { + DataFlowReference = "DataFlowReference" +} + +/** + * Defines values for DataFlowReferenceType. \ + * {@link KnownDataFlowReferenceType} can be used interchangeably with DataFlowReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **DataFlowReference** + */ +export type DataFlowReferenceType = string; + +/** + * Known values of {@link NotebookReferenceType} that the service accepts. + */ +export const enum KnownNotebookReferenceType { + NotebookReference = "NotebookReference" +} + +/** + * Defines values for NotebookReferenceType. \ + * {@link KnownNotebookReferenceType} can be used interchangeably with NotebookReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **NotebookReference** + */ +export type NotebookReferenceType = string; + +/** + * Known values of {@link SparkJobReferenceType} that the service accepts. + */ +export const enum KnownSparkJobReferenceType { + SparkJobDefinitionReference = "SparkJobDefinitionReference" +} + +/** + * Defines values for SparkJobReferenceType. \ + * {@link KnownSparkJobReferenceType} can be used interchangeably with SparkJobReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SparkJobDefinitionReference** + */ +export type SparkJobReferenceType = string; + +/** + * Known values of {@link SqlPoolReferenceType} that the service accepts. + */ +export const enum KnownSqlPoolReferenceType { + SqlPoolReference = "SqlPoolReference" +} + +/** + * Defines values for SqlPoolReferenceType. 
\ + * {@link KnownSqlPoolReferenceType} can be used interchangeably with SqlPoolReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SqlPoolReference** + */ +export type SqlPoolReferenceType = string; + +/** + * Known values of {@link JsonFormatFilePattern} that the service accepts. + */ +export const enum KnownJsonFormatFilePattern { + SetOfObjects = "setOfObjects", + ArrayOfObjects = "arrayOfObjects" +} + +/** + * Defines values for JsonFormatFilePattern. \ + * {@link KnownJsonFormatFilePattern} can be used interchangeably with JsonFormatFilePattern, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **setOfObjects** \ + * **arrayOfObjects** + */ +export type JsonFormatFilePattern = string; + +/** + * Known values of {@link DatasetCompressionLevel} that the service accepts. + */ +export const enum KnownDatasetCompressionLevel { + Optimal = "Optimal", + Fastest = "Fastest" +} + +/** + * Defines values for DatasetCompressionLevel. \ + * {@link KnownDatasetCompressionLevel} can be used interchangeably with DatasetCompressionLevel, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Optimal** \ + * **Fastest** + */ +export type DatasetCompressionLevel = string; + +/** + * Known values of {@link AvroCompressionCodec} that the service accepts. + */ +export const enum KnownAvroCompressionCodec { + None = "none", + Deflate = "deflate", + Snappy = "snappy", + Xz = "xz", + Bzip2 = "bzip2" +} + +/** + * Defines values for AvroCompressionCodec. \ + * {@link KnownAvroCompressionCodec} can be used interchangeably with AvroCompressionCodec, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **none** \ + * **deflate** \ + * **snappy** \ + * **xz** \ + * **bzip2** + */ +export type AvroCompressionCodec = string; + +/** + * Known values of {@link ParquetCompressionCodec} that the service accepts. + */ +export const enum KnownParquetCompressionCodec { + None = "none", + Gzip = "gzip", + Snappy = "snappy", + Lzo = "lzo" +} + +/** + * Defines values for ParquetCompressionCodec. \ + * {@link KnownParquetCompressionCodec} can be used interchangeably with ParquetCompressionCodec, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **none** \ + * **gzip** \ + * **snappy** \ + * **lzo** + */ +export type ParquetCompressionCodec = string; + +/** + * Known values of {@link DelimitedTextCompressionCodec} that the service accepts. + */ +export const enum KnownDelimitedTextCompressionCodec { + Bzip2 = "bzip2", + Gzip = "gzip", + Deflate = "deflate", + ZipDeflate = "zipDeflate", + Snappy = "snappy", + Lz4 = "lz4" +} + +/** + * Defines values for DelimitedTextCompressionCodec. \ + * {@link KnownDelimitedTextCompressionCodec} can be used interchangeably with DelimitedTextCompressionCodec, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **bzip2** \ + * **gzip** \ + * **deflate** \ + * **zipDeflate** \ + * **snappy** \ + * **lz4** + */ +export type DelimitedTextCompressionCodec = string; + +/** + * Known values of {@link OrcCompressionCodec} that the service accepts. + */ +export const enum KnownOrcCompressionCodec { + None = "none", + Zlib = "zlib", + Snappy = "snappy" +} + +/** + * Defines values for OrcCompressionCodec. \ + * {@link KnownOrcCompressionCodec} can be used interchangeably with OrcCompressionCodec, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **none** \ + * **zlib** \ + * **snappy** + */ +export type OrcCompressionCodec = string; + +/** + * Known values of {@link DynamicsDeploymentType} that the service accepts. + */ +export const enum KnownDynamicsDeploymentType { + Online = "Online", + OnPremisesWithIfd = "OnPremisesWithIfd" +} + +/** + * Defines values for DynamicsDeploymentType. \ + * {@link KnownDynamicsDeploymentType} can be used interchangeably with DynamicsDeploymentType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Online** \ + * **OnPremisesWithIfd** + */ +export type DynamicsDeploymentType = string; + +/** + * Known values of {@link DynamicsAuthenticationType} that the service accepts. + */ +export const enum KnownDynamicsAuthenticationType { + Office365 = "Office365", + Ifd = "Ifd", + AADServicePrincipal = "AADServicePrincipal" +} + +/** + * Defines values for DynamicsAuthenticationType. \ + * {@link KnownDynamicsAuthenticationType} can be used interchangeably with DynamicsAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Office365** \ + * **Ifd** \ + * **AADServicePrincipal** + */ +export type DynamicsAuthenticationType = string; + +/** + * Known values of {@link DynamicsServicePrincipalCredentialType} that the service accepts. + */ +export const enum KnownDynamicsServicePrincipalCredentialType { + ServicePrincipalKey = "ServicePrincipalKey", + ServicePrincipalCert = "ServicePrincipalCert" +} + +/** + * Defines values for DynamicsServicePrincipalCredentialType. \ + * {@link KnownDynamicsServicePrincipalCredentialType} can be used interchangeably with DynamicsServicePrincipalCredentialType, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **ServicePrincipalKey** \ + * **ServicePrincipalCert** + */ +export type DynamicsServicePrincipalCredentialType = string; + +/** + * Known values of {@link SybaseAuthenticationType} that the service accepts. + */ +export const enum KnownSybaseAuthenticationType { + Basic = "Basic", + Windows = "Windows" +} + +/** + * Defines values for SybaseAuthenticationType. \ + * {@link KnownSybaseAuthenticationType} can be used interchangeably with SybaseAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** \ + * **Windows** + */ +export type SybaseAuthenticationType = string; + +/** + * Known values of {@link Db2AuthenticationType} that the service accepts. + */ +export const enum KnownDb2AuthenticationType { + Basic = "Basic" +} + +/** + * Defines values for Db2AuthenticationType. \ + * {@link KnownDb2AuthenticationType} can be used interchangeably with Db2AuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** + */ +export type Db2AuthenticationType = string; + +/** + * Known values of {@link TeradataAuthenticationType} that the service accepts. + */ +export const enum KnownTeradataAuthenticationType { + Basic = "Basic", + Windows = "Windows" +} + +/** + * Defines values for TeradataAuthenticationType. \ + * {@link KnownTeradataAuthenticationType} can be used interchangeably with TeradataAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** \ + * **Windows** + */ +export type TeradataAuthenticationType = string; + +/** + * Known values of {@link ODataAuthenticationType} that the service accepts. 
+ */ +export const enum KnownODataAuthenticationType { + Basic = "Basic", + Anonymous = "Anonymous", + Windows = "Windows", + AadServicePrincipal = "AadServicePrincipal", + ManagedServiceIdentity = "ManagedServiceIdentity" +} + +/** + * Defines values for ODataAuthenticationType. \ + * {@link KnownODataAuthenticationType} can be used interchangeably with ODataAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** \ + * **Anonymous** \ + * **Windows** \ + * **AadServicePrincipal** \ + * **ManagedServiceIdentity** + */ +export type ODataAuthenticationType = string; + +/** + * Known values of {@link ODataAadServicePrincipalCredentialType} that the service accepts. + */ +export const enum KnownODataAadServicePrincipalCredentialType { + ServicePrincipalKey = "ServicePrincipalKey", + ServicePrincipalCert = "ServicePrincipalCert" +} + +/** + * Defines values for ODataAadServicePrincipalCredentialType. \ + * {@link KnownODataAadServicePrincipalCredentialType} can be used interchangeably with ODataAadServicePrincipalCredentialType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **ServicePrincipalKey** \ + * **ServicePrincipalCert** + */ +export type ODataAadServicePrincipalCredentialType = string; + +/** + * Known values of {@link WebAuthenticationType} that the service accepts. + */ +export const enum KnownWebAuthenticationType { + Basic = "Basic", + Anonymous = "Anonymous", + ClientCertificate = "ClientCertificate" +} + +/** + * Defines values for WebAuthenticationType. \ + * {@link KnownWebAuthenticationType} can be used interchangeably with WebAuthenticationType, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **Basic** \ + * **Anonymous** \ + * **ClientCertificate** + */ +export type WebAuthenticationType = string; + +/** + * Known values of {@link MongoDbAuthenticationType} that the service accepts. + */ +export const enum KnownMongoDbAuthenticationType { + Basic = "Basic", + Anonymous = "Anonymous" +} + +/** + * Defines values for MongoDbAuthenticationType. \ + * {@link KnownMongoDbAuthenticationType} can be used interchangeably with MongoDbAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** \ + * **Anonymous** + */ +export type MongoDbAuthenticationType = string; + +/** + * Known values of {@link RestServiceAuthenticationType} that the service accepts. + */ +export const enum KnownRestServiceAuthenticationType { + Anonymous = "Anonymous", + Basic = "Basic", + AadServicePrincipal = "AadServicePrincipal", + ManagedServiceIdentity = "ManagedServiceIdentity" +} + +/** + * Defines values for RestServiceAuthenticationType. \ + * {@link KnownRestServiceAuthenticationType} can be used interchangeably with RestServiceAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Anonymous** \ + * **Basic** \ + * **AadServicePrincipal** \ + * **ManagedServiceIdentity** + */ +export type RestServiceAuthenticationType = string; + +/** + * Known values of {@link HttpAuthenticationType} that the service accepts. + */ +export const enum KnownHttpAuthenticationType { + Basic = "Basic", + Anonymous = "Anonymous", + Digest = "Digest", + Windows = "Windows", + ClientCertificate = "ClientCertificate" +} + +/** + * Defines values for HttpAuthenticationType. \ + * {@link KnownHttpAuthenticationType} can be used interchangeably with HttpAuthenticationType, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **Basic** \ + * **Anonymous** \ + * **Digest** \ + * **Windows** \ + * **ClientCertificate** + */ +export type HttpAuthenticationType = string; + +/** + * Known values of {@link FtpAuthenticationType} that the service accepts. + */ +export const enum KnownFtpAuthenticationType { + Basic = "Basic", + Anonymous = "Anonymous" +} + +/** + * Defines values for FtpAuthenticationType. \ + * {@link KnownFtpAuthenticationType} can be used interchangeably with FtpAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** \ + * **Anonymous** + */ +export type FtpAuthenticationType = string; + +/** + * Known values of {@link SftpAuthenticationType} that the service accepts. + */ +export const enum KnownSftpAuthenticationType { + Basic = "Basic", + SshPublicKey = "SshPublicKey" +} + +/** + * Defines values for SftpAuthenticationType. \ + * {@link KnownSftpAuthenticationType} can be used interchangeably with SftpAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** \ + * **SshPublicKey** + */ +export type SftpAuthenticationType = string; + +/** + * Known values of {@link SapHanaAuthenticationType} that the service accepts. + */ +export const enum KnownSapHanaAuthenticationType { + Basic = "Basic", + Windows = "Windows" +} + +/** + * Defines values for SapHanaAuthenticationType. \ + * {@link KnownSapHanaAuthenticationType} can be used interchangeably with SapHanaAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** \ + * **Windows** + */ +export type SapHanaAuthenticationType = string; + +/** + * Known values of {@link GoogleBigQueryAuthenticationType} that the service accepts. 
+ */ +export const enum KnownGoogleBigQueryAuthenticationType { + ServiceAuthentication = "ServiceAuthentication", + UserAuthentication = "UserAuthentication" +} + +/** + * Defines values for GoogleBigQueryAuthenticationType. \ + * {@link KnownGoogleBigQueryAuthenticationType} can be used interchangeably with GoogleBigQueryAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **ServiceAuthentication** \ + * **UserAuthentication** + */ +export type GoogleBigQueryAuthenticationType = string; + +/** + * Known values of {@link HBaseAuthenticationType} that the service accepts. + */ +export const enum KnownHBaseAuthenticationType { + Anonymous = "Anonymous", + Basic = "Basic" +} + +/** + * Defines values for HBaseAuthenticationType. \ + * {@link KnownHBaseAuthenticationType} can be used interchangeably with HBaseAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Anonymous** \ + * **Basic** + */ +export type HBaseAuthenticationType = string; + +/** + * Known values of {@link HiveServerType} that the service accepts. + */ +export const enum KnownHiveServerType { + HiveServer1 = "HiveServer1", + HiveServer2 = "HiveServer2", + HiveThriftServer = "HiveThriftServer" +} + +/** + * Defines values for HiveServerType. \ + * {@link KnownHiveServerType} can be used interchangeably with HiveServerType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **HiveServer1** \ + * **HiveServer2** \ + * **HiveThriftServer** + */ +export type HiveServerType = string; + +/** + * Known values of {@link HiveThriftTransportProtocol} that the service accepts. + */ +export const enum KnownHiveThriftTransportProtocol { + Binary = "Binary", + Sasl = "SASL", + Http = "HTTP " +} + +/** + * Defines values for HiveThriftTransportProtocol. 
\ + * {@link KnownHiveThriftTransportProtocol} can be used interchangeably with HiveThriftTransportProtocol, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Binary** \ + * **SASL** \ + * **HTTP ** + */ +export type HiveThriftTransportProtocol = string; + +/** + * Known values of {@link HiveAuthenticationType} that the service accepts. + */ +export const enum KnownHiveAuthenticationType { + Anonymous = "Anonymous", + Username = "Username", + UsernameAndPassword = "UsernameAndPassword", + WindowsAzureHDInsightService = "WindowsAzureHDInsightService" +} + +/** + * Defines values for HiveAuthenticationType. \ + * {@link KnownHiveAuthenticationType} can be used interchangeably with HiveAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Anonymous** \ + * **Username** \ + * **UsernameAndPassword** \ + * **WindowsAzureHDInsightService** + */ +export type HiveAuthenticationType = string; + +/** + * Known values of {@link ImpalaAuthenticationType} that the service accepts. + */ +export const enum KnownImpalaAuthenticationType { + Anonymous = "Anonymous", + SaslUsername = "SASLUsername", + UsernameAndPassword = "UsernameAndPassword" +} + +/** + * Defines values for ImpalaAuthenticationType. \ + * {@link KnownImpalaAuthenticationType} can be used interchangeably with ImpalaAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Anonymous** \ + * **SASLUsername** \ + * **UsernameAndPassword** + */ +export type ImpalaAuthenticationType = string; + +/** + * Known values of {@link PhoenixAuthenticationType} that the service accepts. 
+ */ +export const enum KnownPhoenixAuthenticationType { + Anonymous = "Anonymous", + UsernameAndPassword = "UsernameAndPassword", + WindowsAzureHDInsightService = "WindowsAzureHDInsightService" +} + +/** + * Defines values for PhoenixAuthenticationType. \ + * {@link KnownPhoenixAuthenticationType} can be used interchangeably with PhoenixAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Anonymous** \ + * **UsernameAndPassword** \ + * **WindowsAzureHDInsightService** + */ +export type PhoenixAuthenticationType = string; + +/** + * Known values of {@link PrestoAuthenticationType} that the service accepts. + */ +export const enum KnownPrestoAuthenticationType { + Anonymous = "Anonymous", + Ldap = "LDAP" +} + +/** + * Defines values for PrestoAuthenticationType. \ + * {@link KnownPrestoAuthenticationType} can be used interchangeably with PrestoAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Anonymous** \ + * **LDAP** + */ +export type PrestoAuthenticationType = string; + +/** + * Known values of {@link ServiceNowAuthenticationType} that the service accepts. + */ +export const enum KnownServiceNowAuthenticationType { + Basic = "Basic", + OAuth2 = "OAuth2" +} + +/** + * Defines values for ServiceNowAuthenticationType. \ + * {@link KnownServiceNowAuthenticationType} can be used interchangeably with ServiceNowAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** \ + * **OAuth2** + */ +export type ServiceNowAuthenticationType = string; + +/** + * Known values of {@link SparkServerType} that the service accepts. 
+ */ +export const enum KnownSparkServerType { + SharkServer = "SharkServer", + SharkServer2 = "SharkServer2", + SparkThriftServer = "SparkThriftServer" +} + +/** + * Defines values for SparkServerType. \ + * {@link KnownSparkServerType} can be used interchangeably with SparkServerType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SharkServer** \ + * **SharkServer2** \ + * **SparkThriftServer** + */ +export type SparkServerType = string; + +/** + * Known values of {@link SparkThriftTransportProtocol} that the service accepts. + */ +export const enum KnownSparkThriftTransportProtocol { + Binary = "Binary", + Sasl = "SASL", + Http = "HTTP " +} + +/** + * Defines values for SparkThriftTransportProtocol. \ + * {@link KnownSparkThriftTransportProtocol} can be used interchangeably with SparkThriftTransportProtocol, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Binary** \ + * **SASL** \ + * **HTTP ** + */ +export type SparkThriftTransportProtocol = string; + +/** + * Known values of {@link SparkAuthenticationType} that the service accepts. + */ +export const enum KnownSparkAuthenticationType { + Anonymous = "Anonymous", + Username = "Username", + UsernameAndPassword = "UsernameAndPassword", + WindowsAzureHDInsightService = "WindowsAzureHDInsightService" +} + +/** + * Defines values for SparkAuthenticationType. \ + * {@link KnownSparkAuthenticationType} can be used interchangeably with SparkAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Anonymous** \ + * **Username** \ + * **UsernameAndPassword** \ + * **WindowsAzureHDInsightService** + */ +export type SparkAuthenticationType = string; + +/** + * Known values of {@link HdiNodeTypes} that the service accepts. 
+ */ +export const enum KnownHdiNodeTypes { + Headnode = "Headnode", + Workernode = "Workernode", + Zookeeper = "Zookeeper" +} + +/** + * Defines values for HdiNodeTypes. \ + * {@link KnownHdiNodeTypes} can be used interchangeably with HdiNodeTypes, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Headnode** \ + * **Workernode** \ + * **Zookeeper** + */ +export type HdiNodeTypes = string; + +/** + * Known values of {@link GoogleAdWordsAuthenticationType} that the service accepts. + */ +export const enum KnownGoogleAdWordsAuthenticationType { + ServiceAuthentication = "ServiceAuthentication", + UserAuthentication = "UserAuthentication" +} + +/** + * Defines values for GoogleAdWordsAuthenticationType. \ + * {@link KnownGoogleAdWordsAuthenticationType} can be used interchangeably with GoogleAdWordsAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **ServiceAuthentication** \ + * **UserAuthentication** + */ +export type GoogleAdWordsAuthenticationType = string; + +/** + * Known values of {@link JsonWriteFilePattern} that the service accepts. + */ +export const enum KnownJsonWriteFilePattern { + SetOfObjects = "setOfObjects", + ArrayOfObjects = "arrayOfObjects" +} + +/** + * Defines values for JsonWriteFilePattern. \ + * {@link KnownJsonWriteFilePattern} can be used interchangeably with JsonWriteFilePattern, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **setOfObjects** \ + * **arrayOfObjects** + */ +export type JsonWriteFilePattern = string; + +/** + * Known values of {@link SalesforceSourceReadBehavior} that the service accepts. + */ +export const enum KnownSalesforceSourceReadBehavior { + Query = "Query", + QueryAll = "QueryAll" +} + +/** + * Defines values for SalesforceSourceReadBehavior. 
\ + * {@link KnownSalesforceSourceReadBehavior} can be used interchangeably with SalesforceSourceReadBehavior, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Query** \ + * **QueryAll** + */ +export type SalesforceSourceReadBehavior = string; + +/** + * Known values of {@link SapHanaPartitionOption} that the service accepts. + */ +export const enum KnownSapHanaPartitionOption { + None = "None", + PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable", + SapHanaDynamicRange = "SapHanaDynamicRange" +} + +/** + * Defines values for SapHanaPartitionOption. \ + * {@link KnownSapHanaPartitionOption} can be used interchangeably with SapHanaPartitionOption, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **PhysicalPartitionsOfTable** \ + * **SapHanaDynamicRange** + */ +export type SapHanaPartitionOption = string; + +/** + * Known values of {@link SapTablePartitionOption} that the service accepts. + */ +export const enum KnownSapTablePartitionOption { + None = "None", + PartitionOnInt = "PartitionOnInt", + PartitionOnCalendarYear = "PartitionOnCalendarYear", + PartitionOnCalendarMonth = "PartitionOnCalendarMonth", + PartitionOnCalendarDate = "PartitionOnCalendarDate", + PartitionOnTime = "PartitionOnTime" +} + +/** + * Defines values for SapTablePartitionOption. \ + * {@link KnownSapTablePartitionOption} can be used interchangeably with SapTablePartitionOption, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **PartitionOnInt** \ + * **PartitionOnCalendarYear** \ + * **PartitionOnCalendarMonth** \ + * **PartitionOnCalendarDate** \ + * **PartitionOnTime** + */ +export type SapTablePartitionOption = string; + +/** + * Known values of {@link StoredProcedureParameterType} that the service accepts. 
+ */ +export const enum KnownStoredProcedureParameterType { + String = "String", + Int = "Int", + Int64 = "Int64", + Decimal = "Decimal", + Guid = "Guid", + Boolean = "Boolean", + Date = "Date" +} + +/** + * Defines values for StoredProcedureParameterType. \ + * {@link KnownStoredProcedureParameterType} can be used interchangeably with StoredProcedureParameterType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **String** \ + * **Int** \ + * **Int64** \ + * **Decimal** \ + * **Guid** \ + * **Boolean** \ + * **Date** + */ +export type StoredProcedureParameterType = string; + +/** + * Known values of {@link OraclePartitionOption} that the service accepts. + */ +export const enum KnownOraclePartitionOption { + None = "None", + PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable", + DynamicRange = "DynamicRange" +} + +/** + * Defines values for OraclePartitionOption. \ + * {@link KnownOraclePartitionOption} can be used interchangeably with OraclePartitionOption, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **PhysicalPartitionsOfTable** \ + * **DynamicRange** + */ +export type OraclePartitionOption = string; + +/** + * Known values of {@link TeradataPartitionOption} that the service accepts. + */ +export const enum KnownTeradataPartitionOption { + None = "None", + Hash = "Hash", + DynamicRange = "DynamicRange" +} + +/** + * Defines values for TeradataPartitionOption. \ + * {@link KnownTeradataPartitionOption} can be used interchangeably with TeradataPartitionOption, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **Hash** \ + * **DynamicRange** + */ +export type TeradataPartitionOption = string; + +/** + * Known values of {@link CassandraSourceReadConsistencyLevels} that the service accepts. 
+ */ +export const enum KnownCassandraSourceReadConsistencyLevels { + ALL = "ALL", + EachQuorum = "EACH_QUORUM", + Quorum = "QUORUM", + LocalQuorum = "LOCAL_QUORUM", + ONE = "ONE", + TWO = "TWO", + Three = "THREE", + LocalONE = "LOCAL_ONE", + Serial = "SERIAL", + LocalSerial = "LOCAL_SERIAL" +} + +/** + * Defines values for CassandraSourceReadConsistencyLevels. \ + * {@link KnownCassandraSourceReadConsistencyLevels} can be used interchangeably with CassandraSourceReadConsistencyLevels, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **ALL** \ + * **EACH_QUORUM** \ + * **QUORUM** \ + * **LOCAL_QUORUM** \ + * **ONE** \ + * **TWO** \ + * **THREE** \ + * **LOCAL_ONE** \ + * **SERIAL** \ + * **LOCAL_SERIAL** + */ +export type CassandraSourceReadConsistencyLevels = string; + +/** + * Known values of {@link NetezzaPartitionOption} that the service accepts. + */ +export const enum KnownNetezzaPartitionOption { + None = "None", + DataSlice = "DataSlice", + DynamicRange = "DynamicRange" +} + +/** + * Defines values for NetezzaPartitionOption. \ + * {@link KnownNetezzaPartitionOption} can be used interchangeably with NetezzaPartitionOption, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **DataSlice** \ + * **DynamicRange** + */ +export type NetezzaPartitionOption = string; + +/** + * Known values of {@link SapCloudForCustomerSinkWriteBehavior} that the service accepts. + */ +export const enum KnownSapCloudForCustomerSinkWriteBehavior { + Insert = "Insert", + Update = "Update" +} + +/** + * Defines values for SapCloudForCustomerSinkWriteBehavior. \ + * {@link KnownSapCloudForCustomerSinkWriteBehavior} can be used interchangeably with SapCloudForCustomerSinkWriteBehavior, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **Insert** \ + * **Update** + */ +export type SapCloudForCustomerSinkWriteBehavior = string; + +/** + * Known values of {@link PolybaseSettingsRejectType} that the service accepts. + */ +export const enum KnownPolybaseSettingsRejectType { + Value = "value", + Percentage = "percentage" +} + +/** + * Defines values for PolybaseSettingsRejectType. \ + * {@link KnownPolybaseSettingsRejectType} can be used interchangeably with PolybaseSettingsRejectType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **value** \ + * **percentage** + */ +export type PolybaseSettingsRejectType = string; + +/** + * Known values of {@link AzureSearchIndexWriteBehaviorType} that the service accepts. + */ +export const enum KnownAzureSearchIndexWriteBehaviorType { + Merge = "Merge", + Upload = "Upload" +} + +/** + * Defines values for AzureSearchIndexWriteBehaviorType. \ + * {@link KnownAzureSearchIndexWriteBehaviorType} can be used interchangeably with AzureSearchIndexWriteBehaviorType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Merge** \ + * **Upload** + */ +export type AzureSearchIndexWriteBehaviorType = string; + +/** + * Known values of {@link DynamicsSinkWriteBehavior} that the service accepts. + */ +export const enum KnownDynamicsSinkWriteBehavior { + Upsert = "Upsert" +} + +/** + * Defines values for DynamicsSinkWriteBehavior. \ + * {@link KnownDynamicsSinkWriteBehavior} can be used interchangeably with DynamicsSinkWriteBehavior, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Upsert** + */ +export type DynamicsSinkWriteBehavior = string; + +/** + * Known values of {@link SalesforceSinkWriteBehavior} that the service accepts. 
+ */ +export const enum KnownSalesforceSinkWriteBehavior { + Insert = "Insert", + Upsert = "Upsert" +} + +/** + * Defines values for SalesforceSinkWriteBehavior. \ + * {@link KnownSalesforceSinkWriteBehavior} can be used interchangeably with SalesforceSinkWriteBehavior, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Insert** \ + * **Upsert** + */ +export type SalesforceSinkWriteBehavior = string; + +/** + * Known values of {@link HDInsightActivityDebugInfoOption} that the service accepts. + */ +export const enum KnownHDInsightActivityDebugInfoOption { + None = "None", + Always = "Always", + Failure = "Failure" +} + +/** + * Defines values for HDInsightActivityDebugInfoOption. \ + * {@link KnownHDInsightActivityDebugInfoOption} can be used interchangeably with HDInsightActivityDebugInfoOption, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **Always** \ + * **Failure** + */ +export type HDInsightActivityDebugInfoOption = string; + +/** + * Known values of {@link SsisPackageLocationType} that the service accepts. + */ +export const enum KnownSsisPackageLocationType { + Ssisdb = "SSISDB", + File = "File", + InlinePackage = "InlinePackage" +} + +/** + * Defines values for SsisPackageLocationType. \ + * {@link KnownSsisPackageLocationType} can be used interchangeably with SsisPackageLocationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SSISDB** \ + * **File** \ + * **InlinePackage** + */ +export type SsisPackageLocationType = string; + +/** + * Known values of {@link SsisLogLocationType} that the service accepts. + */ +export const enum KnownSsisLogLocationType { + File = "File" +} + +/** + * Defines values for SsisLogLocationType. 
\ + * {@link KnownSsisLogLocationType} can be used interchangeably with SsisLogLocationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **File** + */ +export type SsisLogLocationType = string; + +/** + * Known values of {@link WebActivityMethod} that the service accepts. + */ +export const enum KnownWebActivityMethod { + GET = "GET", + Post = "POST", + PUT = "PUT", + Delete = "DELETE" +} + +/** + * Defines values for WebActivityMethod. \ + * {@link KnownWebActivityMethod} can be used interchangeably with WebActivityMethod, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **GET** \ + * **POST** \ + * **PUT** \ + * **DELETE** + */ +export type WebActivityMethod = string; + +/** + * Known values of {@link AzureFunctionActivityMethod} that the service accepts. + */ +export const enum KnownAzureFunctionActivityMethod { + GET = "GET", + Post = "POST", + PUT = "PUT", + Delete = "DELETE", + Options = "OPTIONS", + Head = "HEAD", + Trace = "TRACE" +} + +/** + * Defines values for AzureFunctionActivityMethod. \ + * {@link KnownAzureFunctionActivityMethod} can be used interchangeably with AzureFunctionActivityMethod, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **GET** \ + * **POST** \ + * **PUT** \ + * **DELETE** \ + * **OPTIONS** \ + * **HEAD** \ + * **TRACE** + */ +export type AzureFunctionActivityMethod = string; + +/** + * Known values of {@link WebHookActivityMethod} that the service accepts. + */ +export const enum KnownWebHookActivityMethod { + Post = "POST" +} + +/** + * Defines values for WebHookActivityMethod. \ + * {@link KnownWebHookActivityMethod} can be used interchangeably with WebHookActivityMethod, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **POST** + */ +export type WebHookActivityMethod = string; + +/** + * Known values of {@link DataFlowComputeType} that the service accepts. + */ +export const enum KnownDataFlowComputeType { + General = "General", + MemoryOptimized = "MemoryOptimized", + ComputeOptimized = "ComputeOptimized" +} + +/** + * Defines values for DataFlowComputeType. \ + * {@link KnownDataFlowComputeType} can be used interchangeably with DataFlowComputeType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **General** \ + * **MemoryOptimized** \ + * **ComputeOptimized** + */ +export type DataFlowComputeType = string; + +/** + * Known values of {@link RecurrenceFrequency} that the service accepts. + */ +export const enum KnownRecurrenceFrequency { + NotSpecified = "NotSpecified", + Minute = "Minute", + Hour = "Hour", + Day = "Day", + Week = "Week", + Month = "Month", + Year = "Year" +} + +/** + * Defines values for RecurrenceFrequency. \ + * {@link KnownRecurrenceFrequency} can be used interchangeably with RecurrenceFrequency, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **NotSpecified** \ + * **Minute** \ + * **Hour** \ + * **Day** \ + * **Week** \ + * **Month** \ + * **Year** + */ +export type RecurrenceFrequency = string; + +/** + * Known values of {@link BlobEventTypes} that the service accepts. + */ +export const enum KnownBlobEventTypes { + MicrosoftStorageBlobCreated = "Microsoft.Storage.BlobCreated", + MicrosoftStorageBlobDeleted = "Microsoft.Storage.BlobDeleted" +} + +/** + * Defines values for BlobEventTypes. \ + * {@link KnownBlobEventTypes} can be used interchangeably with BlobEventTypes, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **Microsoft.Storage.BlobCreated** \ + * **Microsoft.Storage.BlobDeleted** + */ +export type BlobEventTypes = string; + +/** + * Known values of {@link TumblingWindowFrequency} that the service accepts. + */ +export const enum KnownTumblingWindowFrequency { + Minute = "Minute", + Hour = "Hour" +} + +/** + * Defines values for TumblingWindowFrequency. \ + * {@link KnownTumblingWindowFrequency} can be used interchangeably with TumblingWindowFrequency, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Minute** \ + * **Hour** + */ +export type TumblingWindowFrequency = string; + +/** + * Known values of {@link TriggerReferenceType} that the service accepts. + */ +export const enum KnownTriggerReferenceType { + TriggerReference = "TriggerReference" +} + +/** + * Defines values for TriggerReferenceType. \ + * {@link KnownTriggerReferenceType} can be used interchangeably with TriggerReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **TriggerReference** + */ +export type TriggerReferenceType = string; + +/** + * Known values of {@link IntegrationRuntimeState} that the service accepts. + */ +export const enum KnownIntegrationRuntimeState { + Initial = "Initial", + Stopped = "Stopped", + Started = "Started", + Starting = "Starting", + Stopping = "Stopping", + NeedRegistration = "NeedRegistration", + Online = "Online", + Limited = "Limited", + Offline = "Offline", + AccessDenied = "AccessDenied" +} + +/** + * Defines values for IntegrationRuntimeState. \ + * {@link KnownIntegrationRuntimeState} can be used interchangeably with IntegrationRuntimeState, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **Initial** \ + * **Stopped** \ + * **Started** \ + * **Starting** \ + * **Stopping** \ + * **NeedRegistration** \ + * **Online** \ + * **Limited** \ + * **Offline** \ + * **AccessDenied** + */ +export type IntegrationRuntimeState = string; + +/** + * Known values of {@link IntegrationRuntimeSsisCatalogPricingTier} that the service accepts. + */ +export const enum KnownIntegrationRuntimeSsisCatalogPricingTier { + Basic = "Basic", + Standard = "Standard", + Premium = "Premium", + PremiumRS = "PremiumRS" +} + +/** + * Defines values for IntegrationRuntimeSsisCatalogPricingTier. \ + * {@link KnownIntegrationRuntimeSsisCatalogPricingTier} can be used interchangeably with IntegrationRuntimeSsisCatalogPricingTier, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Basic** \ + * **Standard** \ + * **Premium** \ + * **PremiumRS** + */ +export type IntegrationRuntimeSsisCatalogPricingTier = string; + +/** + * Known values of {@link IntegrationRuntimeLicenseType} that the service accepts. + */ +export const enum KnownIntegrationRuntimeLicenseType { + BasePrice = "BasePrice", + LicenseIncluded = "LicenseIncluded" +} + +/** + * Defines values for IntegrationRuntimeLicenseType. \ + * {@link KnownIntegrationRuntimeLicenseType} can be used interchangeably with IntegrationRuntimeLicenseType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **BasePrice** \ + * **LicenseIncluded** + */ +export type IntegrationRuntimeLicenseType = string; + +/** + * Known values of {@link IntegrationRuntimeEntityReferenceType} that the service accepts. + */ +export const enum KnownIntegrationRuntimeEntityReferenceType { + IntegrationRuntimeReference = "IntegrationRuntimeReference", + LinkedServiceReference = "LinkedServiceReference" +} + +/** + * Defines values for IntegrationRuntimeEntityReferenceType. 
\ + * {@link KnownIntegrationRuntimeEntityReferenceType} can be used interchangeably with IntegrationRuntimeEntityReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **IntegrationRuntimeReference** \ + * **LinkedServiceReference** + */ +export type IntegrationRuntimeEntityReferenceType = string; + +/** + * Known values of {@link IntegrationRuntimeEdition} that the service accepts. + */ +export const enum KnownIntegrationRuntimeEdition { + Standard = "Standard", + Enterprise = "Enterprise" +} + +/** + * Defines values for IntegrationRuntimeEdition. \ + * {@link KnownIntegrationRuntimeEdition} can be used interchangeably with IntegrationRuntimeEdition, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Standard** \ + * **Enterprise** + */ +export type IntegrationRuntimeEdition = string; + +/** + * Known values of {@link CopyBehaviorType} that the service accepts. + */ +export const enum KnownCopyBehaviorType { + PreserveHierarchy = "PreserveHierarchy", + FlattenHierarchy = "FlattenHierarchy", + MergeFiles = "MergeFiles" +} + +/** + * Defines values for CopyBehaviorType. \ + * {@link KnownCopyBehaviorType} can be used interchangeably with CopyBehaviorType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **PreserveHierarchy** \ + * **FlattenHierarchy** \ + * **MergeFiles** + */ +export type CopyBehaviorType = string; +/** + * Defines values for ResourceIdentityType. + */ +export type ResourceIdentityType = "None" | "SystemAssigned"; +/** + * Defines values for DayOfWeek. + */ +export type DayOfWeek = + | "Sunday" + | "Monday" + | "Tuesday" + | "Wednesday" + | "Thursday" + | "Friday" + | "Saturday"; + +/** + * Contains response data for the getLinkedServicesByWorkspace operation. 
+ */ +export type LinkedServiceGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: LinkedServiceListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface LinkedServiceCreateOrUpdateLinkedServiceOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. + */ + ifMatch?: string; +} + +/** + * Contains response data for the createOrUpdateLinkedService operation. + */ +export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: LinkedServiceResource; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Optional parameters. + */ +export interface LinkedServiceGetLinkedServiceOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. + */ + ifNoneMatch?: string; +} + +/** + * Contains response data for the getLinkedService operation. + */ +export type LinkedServiceGetLinkedServiceResponse = LinkedServiceResource & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: LinkedServiceResource; + }; +}; + +/** + * Contains response data for the getLinkedServicesByWorkspaceNext operation. + */ +export type LinkedServiceGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: LinkedServiceListResponse; + }; +}; + +/** + * Contains response data for the getDatasetsByWorkspace operation. + */ +export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: DatasetListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface DatasetCreateOrUpdateDatasetOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. + */ + ifMatch?: string; +} + +/** + * Contains response data for the createOrUpdateDataset operation. + */ +export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: DatasetResource; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Optional parameters. 
+ */ +export interface DatasetGetDatasetOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. + */ + ifNoneMatch?: string; +} + +/** + * Contains response data for the getDataset operation. + */ +export type DatasetGetDatasetResponse = DatasetResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: DatasetResource; + }; +}; + +/** + * Contains response data for the getDatasetsByWorkspaceNext operation. + */ +export type DatasetGetDatasetsByWorkspaceNextResponse = DatasetListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: DatasetListResponse; + }; +}; + +/** + * Contains response data for the getPipelinesByWorkspace operation. + */ +export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: PipelineListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface PipelineCreateOrUpdatePipelineOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. + */ + ifMatch?: string; +} + +/** + * Contains response data for the createOrUpdatePipeline operation. 
+ */ +export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: PipelineResource; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Optional parameters. + */ +export interface PipelineGetPipelineOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. + */ + ifNoneMatch?: string; +} + +/** + * Contains response data for the getPipeline operation. + */ +export type PipelineGetPipelineResponse = PipelineResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: PipelineResource; + }; +}; + +/** + * Optional parameters. + */ +export interface PipelineCreatePipelineRunOptionalParams + extends coreHttp.OperationOptions { + /** + * Parameters of the pipeline run. These parameters will be used only if the runId is not specified. + */ + parameters?: { [propertyName: string]: any }; + /** + * The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run. + */ + referencePipelineRunId?: string; + /** + * Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId. + */ + isRecovery?: boolean; + /** + * In recovery mode, the rerun will start from this activity. If not specified, all activities will run. 
+ */ + startActivityName?: string; +} + +/** + * Contains response data for the createPipelineRun operation. + */ +export type PipelineCreatePipelineRunResponse = CreateRunResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: CreateRunResponse; + }; +}; + +/** + * Contains response data for the getPipelinesByWorkspaceNext operation. + */ +export type PipelineGetPipelinesByWorkspaceNextResponse = PipelineListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: PipelineListResponse; + }; +}; + +/** + * Contains response data for the queryPipelineRunsByWorkspace operation. + */ +export type PipelineRunQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: PipelineRunsQueryResponse; + }; +}; + +/** + * Contains response data for the getPipelineRun operation. + */ +export type PipelineRunGetPipelineRunResponse = PipelineRun & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: PipelineRun; + }; +}; + +/** + * Contains response data for the queryActivityRuns operation. + */ +export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: ActivityRunsQueryResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface PipelineRunCancelPipelineRunOptionalParams + extends coreHttp.OperationOptions { + /** + * If true, cancel all the Child pipelines that are triggered by the current pipeline. + */ + isRecursive?: boolean; +} + +/** + * Contains response data for the getTriggersByWorkspace operation. + */ +export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: TriggerListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface TriggerCreateOrUpdateTriggerOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. + */ + ifMatch?: string; +} + +/** + * Contains response data for the createOrUpdateTrigger operation. + */ +export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: TriggerResource; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Optional parameters. + */ +export interface TriggerGetTriggerOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the trigger entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. + */ + ifNoneMatch?: string; +} + +/** + * Contains response data for the getTrigger operation. + */ +export type TriggerGetTriggerResponse = TriggerResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: TriggerResource; + }; +}; + +/** + * Contains response data for the subscribeTriggerToEvents operation. + */ +export type TriggerSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: TriggerSubscriptionOperationStatus; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Contains response data for the getEventSubscriptionStatus operation. + */ +export type TriggerGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: TriggerSubscriptionOperationStatus; + }; +}; + +/** + * Contains response data for the unsubscribeTriggerFromEvents operation. + */ +export type TriggerUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: TriggerSubscriptionOperationStatus; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Contains response data for the getTriggersByWorkspaceNext operation. + */ +export type TriggerGetTriggersByWorkspaceNextResponse = TriggerListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: TriggerListResponse; + }; +}; + +/** + * Contains response data for the queryTriggerRunsByWorkspace operation. + */ +export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: TriggerRunsQueryResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface DataFlowCreateOrUpdateDataFlowOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. + */ + ifMatch?: string; +} + +/** + * Contains response data for the createOrUpdateDataFlow operation. + */ +export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: DataFlowResource; + /** + * The parsed HTTP response headers. 
+ */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Optional parameters. + */ +export interface DataFlowGetDataFlowOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. + */ + ifNoneMatch?: string; +} + +/** + * Contains response data for the getDataFlow operation. + */ +export type DataFlowGetDataFlowResponse = DataFlowResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: DataFlowResource; + }; +}; + +/** + * Contains response data for the getDataFlowsByWorkspace operation. + */ +export type DataFlowGetDataFlowsByWorkspaceResponse = DataFlowListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: DataFlowListResponse; + }; +}; + +/** + * Contains response data for the getDataFlowsByWorkspaceNext operation. + */ +export type DataFlowGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: DataFlowListResponse; + }; +}; + +/** + * Contains response data for the createDataFlowDebugSession operation. + */ +export type DataFlowDebugSessionCreateDataFlowDebugSessionResponse = CreateDataFlowDebugSessionResponse & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: CreateDataFlowDebugSessionResponse; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Contains response data for the queryDataFlowDebugSessionsByWorkspace operation. + */ +export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse = QueryDataFlowDebugSessionsResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: QueryDataFlowDebugSessionsResponse; + }; +}; + +/** + * Contains response data for the addDataFlow operation. + */ +export type DataFlowDebugSessionAddDataFlowResponse = AddDataFlowToDebugSessionResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: AddDataFlowToDebugSessionResponse; + }; +}; + +/** + * Contains response data for the executeCommand operation. + */ +export type DataFlowDebugSessionExecuteCommandResponse = DataFlowDebugCommandResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: DataFlowDebugCommandResponse; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Contains response data for the queryDataFlowDebugSessionsByWorkspaceNext operation. 
+ */ +export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: QueryDataFlowDebugSessionsResponse; + }; +}; + +/** + * Contains response data for the getSqlScriptsByWorkspace operation. + */ +export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlScriptsListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. + */ + ifMatch?: string; +} + +/** + * Contains response data for the createOrUpdateSqlScript operation. + */ +export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlScriptResource; + }; +}; + +/** + * Optional parameters. + */ +export interface SqlScriptGetSqlScriptOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. + */ + ifNoneMatch?: string; +} + +/** + * Contains response data for the getSqlScript operation. 
+ */ +export type SqlScriptGetSqlScriptResponse = SqlScriptResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlScriptResource; + }; +}; + +/** + * Contains response data for the getSqlScriptsByWorkspaceNext operation. + */ +export type SqlScriptGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlScriptsListResponse; + }; +}; + +/** + * Contains response data for the getSparkJobDefinitionsByWorkspace operation. + */ +export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkJobDefinitionsListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the Spark Job Definition entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. + */ + ifMatch?: string; +} + +/** + * Contains response data for the createOrUpdateSparkJobDefinition operation. + */ +export type SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkJobDefinitionResource; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkJobDefinitionGetSparkJobDefinitionOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the Spark Job Definition entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. + */ + ifNoneMatch?: string; +} + +/** + * Contains response data for the getSparkJobDefinition operation. + */ +export type SparkJobDefinitionGetSparkJobDefinitionResponse = SparkJobDefinitionResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkJobDefinitionResource; + }; +}; + +/** + * Contains response data for the executeSparkJobDefinition operation. + */ +export type SparkJobDefinitionExecuteSparkJobDefinitionResponse = SparkBatchJob & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkBatchJob; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Contains response data for the debugSparkJobDefinition operation. + */ +export type SparkJobDefinitionDebugSparkJobDefinitionResponse = SparkBatchJob & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkBatchJob; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Contains response data for the getSparkJobDefinitionsByWorkspaceNext operation. + */ +export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkJobDefinitionsListResponse; + }; +}; + +/** + * Contains response data for the getNotebooksByWorkspace operation. + */ +export type NotebookGetNotebooksByWorkspaceResponse = NotebookListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookListResponse; + }; +}; + +/** + * Contains response data for the getNotebookSummaryByWorkSpace operation. + */ +export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface NotebookCreateOrUpdateNotebookOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the Note book entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
+ */ + ifMatch?: string; +} + +/** + * Contains response data for the createOrUpdateNotebook operation. + */ +export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookResource; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Optional parameters. + */ +export interface NotebookGetNotebookOptionalParams + extends coreHttp.OperationOptions { + /** + * ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. + */ + ifNoneMatch?: string; +} + +/** + * Contains response data for the getNotebook operation. + */ +export type NotebookGetNotebookResponse = NotebookResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookResource; + }; +}; + +/** + * Contains response data for the getNotebooksByWorkspaceNext operation. + */ +export type NotebookGetNotebooksByWorkspaceNextResponse = NotebookListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookListResponse; + }; +}; + +/** + * Contains response data for the getNotebookSummaryByWorkSpaceNext operation. + */ +export type NotebookGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookListResponse; + }; +}; + +/** + * Contains response data for the get operation. + */ +export type WorkspaceGetResponse = Workspace & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: Workspace; + }; +}; + +/** + * Contains response data for the list operation. + */ +export type SqlPoolsListResponse = SqlPoolInfoListResult & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlPoolInfoListResult; + }; +}; + +/** + * Contains response data for the get operation. + */ +export type SqlPoolsGetResponse = SqlPool & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlPool; + }; +}; + +/** + * Contains response data for the list operation. + */ +export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: BigDataPoolResourceInfoListResult; + }; +}; + +/** + * Contains response data for the get operation. + */ +export type BigDataPoolsGetResponse = BigDataPoolResourceInfo & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: BigDataPoolResourceInfo; + }; +}; + +/** + * Contains response data for the list operation. + */ +export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: IntegrationRuntimeListResponse; + }; +}; + +/** + * Contains response data for the get operation. + */ +export type IntegrationRuntimesGetResponse = IntegrationRuntimeResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: IntegrationRuntimeResource; + }; +}; + +/** + * Optional parameters. + */ +export interface WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams + extends coreHttp.OperationOptions { + /** + * Can provide a guid, which is helpful for debugging and to provide better customer support + */ + clientRequestId?: string; +} + +/** + * Contains response data for the getGitHubAccessToken operation. + */ +export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAccessTokenResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: GitHubAccessTokenResponse; + }; +}; + +/** + * Optional parameters. 
+ */ +export interface ArtifactsClientOptionalParams + extends coreHttp.ServiceClientOptions { + /** + * Api Version + */ + apiVersion?: string; + /** + * Overrides client endpoint. + */ + endpoint?: string; +} diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts new file mode 100644 index 000000000000..6335e25515f9 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -0,0 +1,21533 @@ +import * as coreHttp from "@azure/core-http"; + +export const LinkedServiceListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const LinkedService: coreHttp.CompositeMapper = { + serializedName: "LinkedService", + type: { + name: "Composite", + className: "LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + connectVia: { + serializedName: "connectVia", + type: { + name: "Composite", + className: "IntegrationRuntimeReference" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "ParameterSpecification" } + } + } + }, + annotations: { + serializedName: "annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + } + } + } +}; + +export const IntegrationRuntimeReference: 
coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const ParameterSpecification: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ParameterSpecification", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + defaultValue: { + serializedName: "defaultValue", + type: { + name: "any" + } + } + } + } +}; + +export const Resource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Resource", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + readOnly: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const CloudError: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CloudError", + modelProperties: { + code: { + serializedName: "error.code", + required: true, + type: { + name: "String" + } + }, + message: { + serializedName: "error.message", + required: true, + type: { + name: "String" + } + }, + target: { + serializedName: "error.target", + type: { + name: "String" + } + }, + details: { + serializedName: "error.details", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CloudError" + } + } + } + } + } + } +}; + +export const ArtifactRenameRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ArtifactRenameRequest", + modelProperties: { + newName: { + constraints: { + 
Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "newName", + type: { + name: "String" + } + } + } + } +}; + +export const DatasetListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const Dataset: coreHttp.CompositeMapper = { + serializedName: "Dataset", + type: { + name: "Composite", + className: "Dataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + structure: { + serializedName: "structure", + type: { + name: "any" + } + }, + schema: { + serializedName: "schema", + type: { + name: "any" + } + }, + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "ParameterSpecification" } + } + } + }, + annotations: { + serializedName: "annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + folder: { + serializedName: "folder", + type: { + name: "Composite", + className: "DatasetFolder" + } + } + } + } +}; + +export const LinkedServiceReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceReference", + modelProperties: { + type: { + serializedName: 
"type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const DatasetFolder: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetFolder", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const PipelineListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PipelineResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const Activity: coreHttp.CompositeMapper = { + serializedName: "Activity", + type: { + name: "Composite", + className: "Activity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + dependsOn: { + serializedName: "dependsOn", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ActivityDependency" + } + } + } + }, + userProperties: { + serializedName: "userProperties", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "UserProperty" + } + } + } + } + } + } +}; + +export const ActivityDependency: coreHttp.CompositeMapper = { + type: { + name: "Composite", 
+ className: "ActivityDependency", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + activity: { + serializedName: "activity", + required: true, + type: { + name: "String" + } + }, + dependencyConditions: { + serializedName: "dependencyConditions", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const UserProperty: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "UserProperty", + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + value: { + serializedName: "value", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const VariableSpecification: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "VariableSpecification", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + defaultValue: { + serializedName: "defaultValue", + type: { + name: "any" + } + } + } + } +}; + +export const PipelineFolder: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineFolder", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const CreateRunResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CreateRunResponse", + modelProperties: { + runId: { + serializedName: "runId", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const RunFilterParameters: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RunFilterParameters", + modelProperties: { + continuationToken: { + serializedName: "continuationToken", + type: { + name: "String" + } + }, + lastUpdatedAfter: { + serializedName: "lastUpdatedAfter", + required: true, + type: { + name: "DateTime" + } + }, + lastUpdatedBefore: { + serializedName: "lastUpdatedBefore", + required: true, + 
type: { + name: "DateTime" + } + }, + filters: { + serializedName: "filters", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RunQueryFilter" + } + } + } + }, + orderBy: { + serializedName: "orderBy", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RunQueryOrderBy" + } + } + } + } + } + } +}; + +export const RunQueryFilter: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RunQueryFilter", + modelProperties: { + operand: { + serializedName: "operand", + required: true, + type: { + name: "String" + } + }, + operator: { + serializedName: "operator", + required: true, + type: { + name: "String" + } + }, + values: { + serializedName: "values", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const RunQueryOrderBy: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RunQueryOrderBy", + modelProperties: { + orderBy: { + serializedName: "orderBy", + required: true, + type: { + name: "String" + } + }, + order: { + serializedName: "order", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const PipelineRunsQueryResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineRunsQueryResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PipelineRun" + } + } + } + }, + continuationToken: { + serializedName: "continuationToken", + type: { + name: "String" + } + } + } + } +}; + +export const PipelineRun: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineRun", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + runId: { + serializedName: "runId", + readOnly: true, + type: { + name: "String" + } + }, + runGroupId: { + serializedName: "runGroupId", + 
readOnly: true, + type: { + name: "String" + } + }, + isLatest: { + serializedName: "isLatest", + readOnly: true, + type: { + name: "Boolean" + } + }, + pipelineName: { + serializedName: "pipelineName", + readOnly: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + invokedBy: { + serializedName: "invokedBy", + type: { + name: "Composite", + className: "PipelineRunInvokedBy" + } + }, + lastUpdated: { + serializedName: "lastUpdated", + readOnly: true, + type: { + name: "DateTime" + } + }, + runStart: { + serializedName: "runStart", + readOnly: true, + type: { + name: "DateTime" + } + }, + runEnd: { + serializedName: "runEnd", + readOnly: true, + type: { + name: "DateTime" + } + }, + durationInMs: { + serializedName: "durationInMs", + readOnly: true, + type: { + name: "Number" + } + }, + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + }, + message: { + serializedName: "message", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const PipelineRunInvokedBy: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineRunInvokedBy", + modelProperties: { + name: { + serializedName: "name", + readOnly: true, + type: { + name: "String" + } + }, + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + }, + invokedByType: { + serializedName: "invokedByType", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const ActivityRunsQueryResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ActivityRunsQueryResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ActivityRun" + } + } + } + }, + continuationToken: { + serializedName: "continuationToken", + type: { + 
name: "String" + } + } + } + } +}; + +export const ActivityRun: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ActivityRun", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + pipelineName: { + serializedName: "pipelineName", + readOnly: true, + type: { + name: "String" + } + }, + pipelineRunId: { + serializedName: "pipelineRunId", + readOnly: true, + type: { + name: "String" + } + }, + activityName: { + serializedName: "activityName", + readOnly: true, + type: { + name: "String" + } + }, + activityType: { + serializedName: "activityType", + readOnly: true, + type: { + name: "String" + } + }, + activityRunId: { + serializedName: "activityRunId", + readOnly: true, + type: { + name: "String" + } + }, + linkedServiceName: { + serializedName: "linkedServiceName", + readOnly: true, + type: { + name: "String" + } + }, + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + }, + activityRunStart: { + serializedName: "activityRunStart", + readOnly: true, + type: { + name: "DateTime" + } + }, + activityRunEnd: { + serializedName: "activityRunEnd", + readOnly: true, + type: { + name: "DateTime" + } + }, + durationInMs: { + serializedName: "durationInMs", + readOnly: true, + type: { + name: "Number" + } + }, + input: { + serializedName: "input", + readOnly: true, + type: { + name: "any" + } + }, + output: { + serializedName: "output", + readOnly: true, + type: { + name: "any" + } + }, + error: { + serializedName: "error", + readOnly: true, + type: { + name: "any" + } + } + } + } +}; + +export const TriggerListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "TriggerResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } 
+ } +}; + +export const Trigger: coreHttp.CompositeMapper = { + serializedName: "Trigger", + type: { + name: "Composite", + className: "Trigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + runtimeState: { + serializedName: "runtimeState", + readOnly: true, + type: { + name: "String" + } + }, + annotations: { + serializedName: "annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + } + } + } +}; + +export const TriggerSubscriptionOperationStatus: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerSubscriptionOperationStatus", + modelProperties: { + triggerName: { + serializedName: "triggerName", + readOnly: true, + type: { + name: "String" + } + }, + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const TriggerRunsQueryResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerRunsQueryResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "TriggerRun" + } + } + } + }, + continuationToken: { + serializedName: "continuationToken", + type: { + name: "String" + } + } + } + } +}; + +export const TriggerRun: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerRun", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + triggerRunId: { + serializedName: "triggerRunId", + readOnly: true, + type: { + name: "String" + } + }, + triggerName: { + serializedName: "triggerName", + readOnly: true, + type: { + name: "String" + } + }, + 
triggerType: { + serializedName: "triggerType", + readOnly: true, + type: { + name: "String" + } + }, + triggerRunTimestamp: { + serializedName: "triggerRunTimestamp", + readOnly: true, + type: { + name: "DateTime" + } + }, + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + }, + message: { + serializedName: "message", + readOnly: true, + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + triggeredPipelines: { + serializedName: "triggeredPipelines", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; + +export const DataFlow: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlow", + uberParent: "DataFlow", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + annotations: { + serializedName: "annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + folder: { + serializedName: "folder", + type: { + name: "Composite", + className: "DataFlowFolder" + } + } + } + } +}; + +export const DataFlowFolder: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowFolder", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowResource" + } + } + } + }, + nextLink: { + serializedName: 
"nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const CreateDataFlowDebugSessionRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CreateDataFlowDebugSessionRequest", + modelProperties: { + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + existingClusterId: { + serializedName: "existingClusterId", + type: { + name: "String" + } + }, + clusterTimeout: { + serializedName: "clusterTimeout", + type: { + name: "Number" + } + }, + newClusterName: { + serializedName: "newClusterName", + type: { + name: "String" + } + }, + newClusterNodeType: { + serializedName: "newClusterNodeType", + type: { + name: "String" + } + }, + dataBricksLinkedService: { + serializedName: "dataBricksLinkedService", + type: { + name: "Composite", + className: "LinkedServiceResource" + } + } + } + } +}; + +export const CreateDataFlowDebugSessionResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CreateDataFlowDebugSessionResponse", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + } + } + } +}; + +export const QueryDataFlowDebugSessionsResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "QueryDataFlowDebugSessionsResponse", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowDebugSessionInfo" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugSessionInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugSessionInfo", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + computeType: { + serializedName: "computeType", + type: { + name: "String" + } + }, + coreCount: 
{ + serializedName: "coreCount", + type: { + name: "Number" + } + }, + nodeCount: { + serializedName: "nodeCount", + type: { + name: "Number" + } + }, + integrationRuntimeName: { + serializedName: "integrationRuntimeName", + type: { + name: "String" + } + }, + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + startTime: { + serializedName: "startTime", + type: { + name: "String" + } + }, + timeToLiveInMinutes: { + serializedName: "timeToLiveInMinutes", + type: { + name: "Number" + } + }, + lastActivityTime: { + serializedName: "lastActivityTime", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugPackage: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugPackage", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlow: { + serializedName: "dataFlow", + type: { + name: "Composite", + className: "DataFlowDebugResource" + } + }, + datasets: { + serializedName: "datasets", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetDebugResource" + } + } + } + }, + linkedServices: { + serializedName: "linkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceDebugResource" + } + } + } + }, + staging: { + serializedName: "staging", + type: { + name: "Composite", + className: "DataFlowStagingInfo" + } + }, + debugSettings: { + serializedName: "debugSettings", + type: { + name: "Composite", + className: "DataFlowDebugPackageDebugSettings" + } + } + } + } +}; + +export const SubResourceDebugResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SubResourceDebugResource", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowStagingInfo: coreHttp.CompositeMapper = { + type: { 
+ name: "Composite", + className: "DataFlowStagingInfo", + modelProperties: { + linkedService: { + serializedName: "linkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + folderPath: { + serializedName: "folderPath", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugPackageDebugSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugPackageDebugSettings", + modelProperties: { + sourceSettings: { + serializedName: "sourceSettings", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowSourceSetting" + } + } + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + datasetParameters: { + serializedName: "datasetParameters", + type: { + name: "any" + } + } + } + } +}; + +export const DataFlowSourceSetting: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowSourceSetting", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + sourceName: { + serializedName: "sourceName", + type: { + name: "String" + } + }, + rowLimit: { + serializedName: "rowLimit", + type: { + name: "Number" + } + } + } + } +}; + +export const AddDataFlowToDebugSessionResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AddDataFlowToDebugSessionResponse", + modelProperties: { + jobVersion: { + serializedName: "jobVersion", + type: { + name: "String" + } + } + } + } +}; + +export const DeleteDataFlowDebugSessionRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DeleteDataFlowDebugSessionRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugCommandRequest: coreHttp.CompositeMapper = 
{ + type: { + name: "Composite", + className: "DataFlowDebugCommandRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + required: true, + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + commandName: { + serializedName: "commandName", + type: { + name: "String" + } + }, + commandPayload: { + serializedName: "commandPayload", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const DataFlowDebugCommandResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugCommandResponse", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + data: { + serializedName: "data", + type: { + name: "String" + } + } + } + } +}; + +export const SqlScriptsListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScriptsListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SqlScriptResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const SqlScriptResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScriptResource", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + readOnly: true, + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "SqlScript" + } + } + } + } +}; + +export const SqlScript: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScript", + 
additionalProperties: { type: { name: "Object" } }, + modelProperties: { + description: { + serializedName: "description", + type: { + name: "String" + } + }, + type: { + serializedName: "type", + type: { + name: "String" + } + }, + content: { + serializedName: "content", + type: { + name: "Composite", + className: "SqlScriptContent" + } + } + } + } +}; + +export const SqlScriptContent: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScriptContent", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + query: { + serializedName: "query", + required: true, + type: { + name: "String" + } + }, + currentConnection: { + serializedName: "currentConnection", + type: { + name: "Composite", + className: "SqlConnection" + } + }, + metadata: { + serializedName: "metadata", + type: { + name: "Composite", + className: "SqlScriptMetadata" + } + } + } + } +}; + +export const SqlConnection: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlConnection", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SqlScriptMetadata: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScriptMetadata", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + language: { + serializedName: "language", + type: { + name: "String" + } + } + } + } +}; + +export const SparkJobDefinitionsListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobDefinitionsListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkJobDefinitionResource" + } + } + } + }, + nextLink: { + serializedName: 
"nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const SparkJobDefinition: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobDefinition", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + description: { + serializedName: "description", + type: { + name: "String" + } + }, + targetBigDataPool: { + serializedName: "targetBigDataPool", + type: { + name: "Composite", + className: "BigDataPoolReference" + } + }, + requiredSparkVersion: { + serializedName: "requiredSparkVersion", + type: { + name: "String" + } + }, + language: { + serializedName: "language", + type: { + name: "String" + } + }, + jobProperties: { + serializedName: "jobProperties", + type: { + name: "Composite", + className: "SparkJobProperties" + } + } + } + } +}; + +export const BigDataPoolReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "BigDataPoolReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SparkJobProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, + file: { + serializedName: "file", + required: true, + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + conf: { + serializedName: "conf", + type: { + name: "any" + } + }, + args: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: 
"Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + driverMemory: { + serializedName: "driverMemory", + required: true, + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + required: true, + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + required: true, + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + required: true, + type: { + name: "Number" + } + }, + numExecutors: { + serializedName: "numExecutors", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const SparkBatchJob: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJob", + modelProperties: { + livyInfo: { + serializedName: "livyInfo", + type: { + name: "Composite", + className: "SparkBatchJobState" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + workspaceName: { + serializedName: "workspaceName", + type: { + name: "String" + } + }, + sparkPoolName: { + serializedName: "sparkPoolName", + type: { + name: "String" + } + }, + submitterName: { + serializedName: "submitterName", + type: { + name: "String" + } + }, + submitterId: { + serializedName: "submitterId", + type: { + name: "String" + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + jobType: { + serializedName: "jobType", + type: { + name: "String" + } + }, + result: { + serializedName: "result", + type: { + name: "String" + } + }, + scheduler: { + serializedName: "schedulerInfo", + type: { + name: "Composite", + className: "SparkScheduler" + } + }, + plugin: { + serializedName: "pluginInfo", + type: { + name: "Composite", + className: "SparkServicePlugin" + } + }, + errors: { + serializedName: "errorInfo", + type: { + name: "Sequence", + element: { + 
type: { + name: "Composite", + className: "SparkServiceError" + } + } + } + }, + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + id: { + serializedName: "id", + required: true, + type: { + name: "Number" + } + }, + appId: { + serializedName: "appId", + nullable: true, + type: { + name: "String" + } + }, + appInfo: { + serializedName: "appInfo", + nullable: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + state: { + serializedName: "state", + type: { + name: "String" + } + }, + logLines: { + serializedName: "log", + nullable: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const SparkBatchJobState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJobState", + modelProperties: { + notStartedAt: { + serializedName: "notStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + startingAt: { + serializedName: "startingAt", + nullable: true, + type: { + name: "DateTime" + } + }, + runningAt: { + serializedName: "runningAt", + nullable: true, + type: { + name: "DateTime" + } + }, + deadAt: { + serializedName: "deadAt", + nullable: true, + type: { + name: "DateTime" + } + }, + successAt: { + serializedName: "successAt", + nullable: true, + type: { + name: "DateTime" + } + }, + terminatedAt: { + serializedName: "killedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + recoveringAt: { + serializedName: "recoveringAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + }, + jobCreationRequest: { + serializedName: "jobCreationRequest", + type: { + name: "Composite", + className: "SparkRequest" + } + } + } + } +}; + +export const SparkRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkRequest", + modelProperties: { + name: { + 
serializedName: "name", + type: { + name: "String" + } + }, + file: { + serializedName: "file", + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + arguments: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + pythonFiles: { + serializedName: "pyFiles", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + configuration: { + serializedName: "conf", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + driverMemory: { + serializedName: "driverMemory", + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + type: { + name: "Number" + } + }, + executorCount: { + serializedName: "numExecutors", + type: { + name: "Number" + } + } + } + } +}; + +export const SparkScheduler: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkScheduler", + modelProperties: { + submittedAt: { + serializedName: "submittedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + scheduledAt: { + serializedName: "scheduledAt", + nullable: true, + type: { + name: "DateTime" + } + }, + endedAt: { + serializedName: "endedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + cancellationRequestedAt: { + serializedName: "cancellationRequestedAt", + type: { + name: "DateTime" + } + }, 
+ currentState: { + serializedName: "currentState", + type: { + name: "String" + } + } + } + } +}; + +export const SparkServicePlugin: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkServicePlugin", + modelProperties: { + preparationStartedAt: { + serializedName: "preparationStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + resourceAcquisitionStartedAt: { + serializedName: "resourceAcquisitionStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + submissionStartedAt: { + serializedName: "submissionStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + monitoringStartedAt: { + serializedName: "monitoringStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + cleanupStartedAt: { + serializedName: "cleanupStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + } + } + } +}; + +export const SparkServiceError: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkServiceError", + modelProperties: { + message: { + serializedName: "message", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "errorCode", + type: { + name: "String" + } + }, + source: { + serializedName: "source", + type: { + name: "String" + } + } + } + } +}; + +export const NotebookListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "NotebookResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const NotebookResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookResource", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + 
type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + readOnly: true, + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "Notebook" + } + } + } + } +}; + +export const Notebook: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Notebook", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + description: { + serializedName: "description", + type: { + name: "String" + } + }, + bigDataPool: { + serializedName: "bigDataPool", + type: { + name: "Composite", + className: "BigDataPoolReference" + } + }, + sessionProperties: { + serializedName: "sessionProperties", + type: { + name: "Composite", + className: "NotebookSessionProperties" + } + }, + metadata: { + serializedName: "metadata", + type: { + name: "Composite", + className: "NotebookMetadata" + } + }, + nbformat: { + serializedName: "nbformat", + required: true, + type: { + name: "Number" + } + }, + nbformatMinor: { + serializedName: "nbformat_minor", + required: true, + type: { + name: "Number" + } + }, + cells: { + serializedName: "cells", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "NotebookCell" + } + } + } + } + } + } +}; + +export const NotebookSessionProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookSessionProperties", + modelProperties: { + driverMemory: { + serializedName: "driverMemory", + required: true, + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + required: true, + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + required: true, + type: { + name: "String" + } + }, + executorCores: { + serializedName: 
"executorCores", + required: true, + type: { + name: "Number" + } + }, + numExecutors: { + serializedName: "numExecutors", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const NotebookMetadata: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookMetadata", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + kernelspec: { + serializedName: "kernelspec", + type: { + name: "Composite", + className: "NotebookKernelSpec" + } + }, + languageInfo: { + serializedName: "language_info", + type: { + name: "Composite", + className: "NotebookLanguageInfo" + } + } + } + } +}; + +export const NotebookKernelSpec: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookKernelSpec", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + displayName: { + serializedName: "display_name", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const NotebookLanguageInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookLanguageInfo", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + codemirrorMode: { + serializedName: "codemirror_mode", + type: { + name: "String" + } + } + } + } +}; + +export const NotebookCell: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookCell", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + cellType: { + serializedName: "cell_type", + required: true, + type: { + name: "String" + } + }, + metadata: { + serializedName: "metadata", + required: true, + type: { + name: "any" + } + }, + source: { + serializedName: "source", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + 
}, + attachments: { + serializedName: "attachments", + type: { + name: "any" + } + }, + outputs: { + serializedName: "outputs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "NotebookCellOutputItem" + } + } + } + } + } + } +}; + +export const NotebookCellOutputItem: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookCellOutputItem", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, + executionCount: { + serializedName: "execution_count", + type: { + name: "Number" + } + }, + outputType: { + serializedName: "output_type", + required: true, + type: { + name: "String" + } + }, + text: { + serializedName: "text", + type: { + name: "any" + } + }, + data: { + serializedName: "data", + type: { + name: "any" + } + }, + metadata: { + serializedName: "metadata", + type: { + name: "any" + } + } + } + } +}; + +export const DataLakeStorageAccountDetails: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataLakeStorageAccountDetails", + modelProperties: { + accountUrl: { + serializedName: "accountUrl", + type: { + name: "String" + } + }, + filesystem: { + serializedName: "filesystem", + type: { + name: "String" + } + } + } + } +}; + +export const VirtualNetworkProfile: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "VirtualNetworkProfile", + modelProperties: { + computeSubnetId: { + serializedName: "computeSubnetId", + type: { + name: "String" + } + } + } + } +}; + +export const PrivateEndpoint: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PrivateEndpoint", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const PrivateLinkServiceConnectionState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PrivateLinkServiceConnectionState", + modelProperties: { + status: { + serializedName: 
"status", + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + actionsRequired: { + serializedName: "actionsRequired", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const EncryptionDetails: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "EncryptionDetails", + modelProperties: { + doubleEncryptionEnabled: { + serializedName: "doubleEncryptionEnabled", + readOnly: true, + type: { + name: "Boolean" + } + }, + cmk: { + serializedName: "cmk", + type: { + name: "Composite", + className: "CustomerManagedKeyDetails" + } + } + } + } +}; + +export const CustomerManagedKeyDetails: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CustomerManagedKeyDetails", + modelProperties: { + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + }, + key: { + serializedName: "key", + type: { + name: "Composite", + className: "WorkspaceKeyDetails" + } + } + } + } +}; + +export const WorkspaceKeyDetails: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WorkspaceKeyDetails", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, + keyVaultUrl: { + serializedName: "keyVaultUrl", + type: { + name: "String" + } + } + } + } +}; + +export const ManagedVirtualNetworkSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedVirtualNetworkSettings", + modelProperties: { + preventDataExfiltration: { + serializedName: "preventDataExfiltration", + type: { + name: "Boolean" + } + }, + linkedAccessCheckOnTargetResource: { + serializedName: "linkedAccessCheckOnTargetResource", + type: { + name: "Boolean" + } + }, + allowedAadTenantIdsForLinking: { + serializedName: "allowedAadTenantIdsForLinking", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const 
WorkspaceRepositoryConfiguration: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WorkspaceRepositoryConfiguration", + modelProperties: { + type: { + serializedName: "type", + type: { + name: "String" + } + }, + hostName: { + serializedName: "hostName", + type: { + name: "String" + } + }, + accountName: { + serializedName: "accountName", + type: { + name: "String" + } + }, + projectName: { + serializedName: "projectName", + type: { + name: "String" + } + }, + repositoryName: { + serializedName: "repositoryName", + type: { + name: "String" + } + }, + collaborationBranch: { + serializedName: "collaborationBranch", + type: { + name: "String" + } + }, + rootFolder: { + serializedName: "rootFolder", + type: { + name: "String" + } + } + } + } +}; + +export const PurviewConfiguration: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PurviewConfiguration", + modelProperties: { + purviewResourceId: { + serializedName: "purviewResourceId", + type: { + name: "String" + } + } + } + } +}; + +export const ManagedIdentity: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedIdentity", + modelProperties: { + principalId: { + serializedName: "principalId", + readOnly: true, + type: { + name: "String" + } + }, + tenantId: { + serializedName: "tenantId", + readOnly: true, + type: { + name: "Uuid" + } + }, + type: { + serializedName: "type", + type: { + name: "Enum", + allowedValues: ["None", "SystemAssigned"] + } + } + } + } +}; + +export const ErrorContract: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorContract", + modelProperties: { + error: { + serializedName: "error", + type: { + name: "Composite", + className: "ErrorResponse" + } + } + } + } +}; + +export const ErrorResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorResponse", + modelProperties: { + code: { + serializedName: "code", + readOnly: true, + type: { + name: "String" + 
} + }, + message: { + serializedName: "message", + readOnly: true, + type: { + name: "String" + } + }, + target: { + serializedName: "target", + readOnly: true, + type: { + name: "String" + } + }, + details: { + serializedName: "details", + readOnly: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ErrorResponse" + } + } + } + }, + additionalInfo: { + serializedName: "additionalInfo", + readOnly: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ErrorAdditionalInfo" + } + } + } + } + } + } +}; + +export const ErrorAdditionalInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorAdditionalInfo", + modelProperties: { + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + }, + info: { + serializedName: "info", + readOnly: true, + type: { + name: "any" + } + } + } + } +}; + +export const SqlPoolInfoListResult: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlPoolInfoListResult", + modelProperties: { + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + }, + value: { + serializedName: "value", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SqlPool" + } + } + } + } + } + } +}; + +export const Sku: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Sku", + modelProperties: { + tier: { + serializedName: "tier", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + capacity: { + serializedName: "capacity", + type: { + name: "Number" + } + } + } + } +}; + +export const BigDataPoolResourceInfoListResult: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "BigDataPoolResourceInfoListResult", + modelProperties: { + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + }, + value: { + serializedName: "value", + type: { + 
name: "Sequence", + element: { + type: { + name: "Composite", + className: "BigDataPoolResourceInfo" + } + } + } + } + } + } +}; + +export const AutoScaleProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AutoScaleProperties", + modelProperties: { + minNodeCount: { + serializedName: "minNodeCount", + type: { + name: "Number" + } + }, + enabled: { + serializedName: "enabled", + type: { + name: "Boolean" + } + }, + maxNodeCount: { + serializedName: "maxNodeCount", + type: { + name: "Number" + } + } + } + } +}; + +export const AutoPauseProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AutoPauseProperties", + modelProperties: { + delayInMinutes: { + serializedName: "delayInMinutes", + type: { + name: "Number" + } + }, + enabled: { + serializedName: "enabled", + type: { + name: "Boolean" + } + } + } + } +}; + +export const LibraryRequirements: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LibraryRequirements", + modelProperties: { + time: { + serializedName: "time", + readOnly: true, + type: { + name: "DateTime" + } + }, + content: { + serializedName: "content", + type: { + name: "String" + } + }, + filename: { + serializedName: "filename", + type: { + name: "String" + } + } + } + } +}; + +export const IntegrationRuntimeListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "IntegrationRuntimeResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const IntegrationRuntime: coreHttp.CompositeMapper = { + serializedName: "IntegrationRuntime", + type: { + name: "Composite", + className: "IntegrationRuntime", + uberParent: "IntegrationRuntime", + additionalProperties: { type: { 
name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + } + } + } +}; + +export const GitHubAccessTokenRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "GitHubAccessTokenRequest", + modelProperties: { + gitHubClientId: { + serializedName: "gitHubClientId", + required: true, + type: { + name: "String" + } + }, + gitHubAccessCode: { + serializedName: "gitHubAccessCode", + required: true, + type: { + name: "String" + } + }, + gitHubAccessTokenBaseUrl: { + serializedName: "gitHubAccessTokenBaseUrl", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const GitHubAccessTokenResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "GitHubAccessTokenResponse", + modelProperties: { + gitHubAccessToken: { + serializedName: "gitHubAccessToken", + type: { + name: "String" + } + } + } + } +}; + +export const Expression: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Expression", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + value: { + serializedName: "value", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SecretBase: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SecretBase", + uberParent: "SecretBase", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const StartDataFlowDebugSessionRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "StartDataFlowDebugSessionRequest", + modelProperties: { + sessionId: { + 
serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlow: { + serializedName: "dataFlow", + type: { + name: "Composite", + className: "DataFlowResource" + } + }, + datasets: { + serializedName: "datasets", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetResource" + } + } + } + }, + linkedServices: { + serializedName: "linkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceResource" + } + } + } + }, + staging: { + serializedName: "staging", + type: { + name: "any" + } + }, + debugSettings: { + serializedName: "debugSettings", + type: { + name: "any" + } + }, + incrementalDebug: { + serializedName: "incrementalDebug", + type: { + name: "Boolean" + } + } + } + } +}; + +export const StartDataFlowDebugSessionResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "StartDataFlowDebugSessionResponse", + modelProperties: { + jobVersion: { + serializedName: "jobVersion", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugPreviewDataRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugPreviewDataRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + streamName: { + serializedName: "streamName", + type: { + name: "String" + } + }, + rowLimits: { + serializedName: "rowLimits", + type: { + name: "Number" + } + } + } + } +}; + +export const DataFlowDebugStatisticsRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugStatisticsRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + streamName: { + serializedName: "streamName", + 
type: { + name: "String" + } + }, + columns: { + serializedName: "columns", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const EvaluateDataFlowExpressionRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "EvaluateDataFlowExpressionRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + streamName: { + serializedName: "streamName", + type: { + name: "String" + } + }, + rowLimits: { + serializedName: "rowLimits", + type: { + name: "Number" + } + }, + expression: { + serializedName: "expression", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugQueryResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugQueryResponse", + modelProperties: { + runId: { + serializedName: "runId", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugResultResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugResultResponse", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + data: { + serializedName: "data", + type: { + name: "String" + } + } + } + } +}; + +export const TriggerDependencyProvisioningStatus: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerDependencyProvisioningStatus", + modelProperties: { + triggerName: { + serializedName: "triggerName", + required: true, + type: { + name: "String" + } + }, + provisioningStatus: { + serializedName: "provisioningStatus", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const PipelineReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: 
{ + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const TriggerPipelineReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerPipelineReference", + modelProperties: { + pipelineReference: { + serializedName: "pipelineReference", + type: { + name: "Composite", + className: "PipelineReference" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const WorkspaceUpdateParameters: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WorkspaceUpdateParameters", + modelProperties: { + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + identity: { + serializedName: "identity", + type: { + name: "Composite", + className: "WorkspaceIdentity" + } + } + } + } +}; + +export const WorkspaceIdentity: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WorkspaceIdentity", + modelProperties: { + type: { + defaultValue: "SystemAssigned", + isConstant: true, + serializedName: "type", + type: { + name: "String" + } + }, + principalId: { + serializedName: "principalId", + readOnly: true, + type: { + name: "String" + } + }, + tenantId: { + serializedName: "tenantId", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const DatasetReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { 
name: "any" } } + } + } + } + } +}; + +export const DataFlowReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowReference", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + datasetParameters: { + serializedName: "datasetParameters", + type: { + name: "any" + } + } + } + } +}; + +export const RerunTumblingWindowTriggerActionParameters: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RerunTumblingWindowTriggerActionParameters", + modelProperties: { + startTime: { + serializedName: "startTime", + required: true, + type: { + name: "DateTime" + } + }, + endTime: { + serializedName: "endTime", + required: true, + type: { + name: "DateTime" + } + }, + maxConcurrency: { + constraints: { + InclusiveMaximum: 50, + InclusiveMinimum: 1 + }, + serializedName: "maxConcurrency", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const RerunTriggerListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RerunTriggerListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RerunTriggerResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const GetSsisObjectMetadataRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "GetSsisObjectMetadataRequest", + modelProperties: { + metadataPath: { + serializedName: "metadataPath", + type: { + name: "String" + } + } + } + } +}; + +export const SsisObjectMetadataStatusResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: 
"SsisObjectMetadataStatusResponse", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + type: { + name: "String" + } + }, + error: { + serializedName: "error", + type: { + name: "String" + } + } + } + } +}; + +export const ExposureControlRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ExposureControlRequest", + modelProperties: { + featureName: { + serializedName: "featureName", + type: { + name: "String" + } + }, + featureType: { + serializedName: "featureType", + type: { + name: "String" + } + } + } + } +}; + +export const ExposureControlResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ExposureControlResponse", + modelProperties: { + featureName: { + serializedName: "featureName", + readOnly: true, + type: { + name: "String" + } + }, + value: { + serializedName: "value", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const SynapseNotebookReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SynapseNotebookReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SynapseSparkJobReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SynapseSparkJobReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SqlPoolReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlPoolReference", + modelProperties: { + type: { + 
serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const Transformation: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Transformation", + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + } + } + } +}; + +export const DatasetLocation: coreHttp.CompositeMapper = { + serializedName: "DatasetLocation", + type: { + name: "Composite", + className: "DatasetLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + folderPath: { + serializedName: "folderPath", + type: { + name: "any" + } + }, + fileName: { + serializedName: "fileName", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetDataElement: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetDataElement", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "any" + } + }, + type: { + serializedName: "type", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetSchemaDataElement: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetSchemaDataElement", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + name: { + serializedName: "name", + type: { + name: "any" + } + }, + type: { + serializedName: "type", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetStorageFormat: coreHttp.CompositeMapper = { + serializedName: "DatasetStorageFormat", + type: { + name: "Composite", + className: "DatasetStorageFormat", + uberParent: 
"DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + serializer: { + serializedName: "serializer", + type: { + name: "any" + } + }, + deserializer: { + serializedName: "deserializer", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetCompression: coreHttp.CompositeMapper = { + serializedName: "DatasetCompression", + type: { + name: "Composite", + className: "DatasetCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const WebLinkedServiceTypeProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WebLinkedServiceTypeProperties", + uberParent: "WebLinkedServiceTypeProperties", + polymorphicDiscriminator: { + serializedName: "authenticationType", + clientName: "authenticationType" + }, + modelProperties: { + url: { + serializedName: "url", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "authenticationType", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const ScriptAction: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ScriptAction", + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + uri: { + serializedName: "uri", + required: true, + type: { + name: "String" + } + }, + roles: { + serializedName: "roles", + required: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "String" + } + } + } + } +}; + +export const ActivityPolicy: 
coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ActivityPolicy", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + timeout: { + serializedName: "timeout", + type: { + name: "any" + } + }, + retry: { + serializedName: "retry", + type: { + name: "any" + } + }, + retryIntervalInSeconds: { + constraints: { + InclusiveMaximum: 86400, + InclusiveMinimum: 30 + }, + serializedName: "retryIntervalInSeconds", + type: { + name: "Number" + } + }, + secureInput: { + serializedName: "secureInput", + type: { + name: "Boolean" + } + }, + secureOutput: { + serializedName: "secureOutput", + type: { + name: "Boolean" + } + } + } + } +}; + +export const StoreReadSettings: coreHttp.CompositeMapper = { + serializedName: "StoreReadSettings", + type: { + name: "Composite", + className: "StoreReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + maxConcurrentConnections: { + serializedName: "maxConcurrentConnections", + type: { + name: "any" + } + } + } + } +}; + +export const StoreWriteSettings: coreHttp.CompositeMapper = { + serializedName: "StoreWriteSettings", + type: { + name: "Composite", + className: "StoreWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + maxConcurrentConnections: { + serializedName: "maxConcurrentConnections", + type: { + name: "any" + } + }, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const DistcpSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", 
+ className: "DistcpSettings", + modelProperties: { + resourceManagerEndpoint: { + serializedName: "resourceManagerEndpoint", + required: true, + type: { + name: "any" + } + }, + tempScriptPath: { + serializedName: "tempScriptPath", + required: true, + type: { + name: "any" + } + }, + distcpOptions: { + serializedName: "distcpOptions", + type: { + name: "any" + } + } + } + } +}; + +export const FormatReadSettings: coreHttp.CompositeMapper = { + serializedName: "FormatReadSettings", + type: { + name: "Composite", + className: "FormatReadSettings", + uberParent: "FormatReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const FormatWriteSettings: coreHttp.CompositeMapper = { + serializedName: "FormatWriteSettings", + type: { + name: "Composite", + className: "FormatWriteSettings", + uberParent: "FormatWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const CopySource: coreHttp.CompositeMapper = { + serializedName: "CopySource", + type: { + name: "Composite", + className: "CopySource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + sourceRetryCount: { + serializedName: "sourceRetryCount", + type: { + name: "any" + } + }, + sourceRetryWait: { + serializedName: "sourceRetryWait", + type: { + name: "any" + } + }, + maxConcurrentConnections: { + serializedName: "maxConcurrentConnections", + 
type: { + name: "any" + } + } + } + } +}; + +export const CopySink: coreHttp.CompositeMapper = { + serializedName: "CopySink", + type: { + name: "Composite", + className: "CopySink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + writeBatchSize: { + serializedName: "writeBatchSize", + type: { + name: "any" + } + }, + writeBatchTimeout: { + serializedName: "writeBatchTimeout", + type: { + name: "any" + } + }, + sinkRetryCount: { + serializedName: "sinkRetryCount", + type: { + name: "any" + } + }, + sinkRetryWait: { + serializedName: "sinkRetryWait", + type: { + name: "any" + } + }, + maxConcurrentConnections: { + serializedName: "maxConcurrentConnections", + type: { + name: "any" + } + } + } + } +}; + +export const StagingSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "StagingSettings", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + path: { + serializedName: "path", + type: { + name: "any" + } + }, + enableCompression: { + serializedName: "enableCompression", + type: { + name: "any" + } + } + } + } +}; + +export const RedirectIncompatibleRowSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RedirectIncompatibleRowSettings", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + linkedServiceName: { + serializedName: "linkedServiceName", + required: true, + type: { + name: "any" + } + }, + path: { + serializedName: "path", + type: { + name: "any" + } + } + } + } +}; + +export const SapHanaPartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: 
"SapHanaPartitionSettings", + modelProperties: { + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + } + } + } +}; + +export const SapTablePartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SapTablePartitionSettings", + modelProperties: { + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + }, + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + serializedName: "partitionLowerBound", + type: { + name: "any" + } + }, + maxPartitionsNumber: { + serializedName: "maxPartitionsNumber", + type: { + name: "any" + } + } + } + } +}; + +export const StoredProcedureParameter: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "StoredProcedureParameter", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "any" + } + }, + type: { + serializedName: "type", + type: { + name: "String" + } + } + } + } +}; + +export const OraclePartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "OraclePartitionSettings", + modelProperties: { + partitionNames: { + serializedName: "partitionNames", + type: { + name: "any" + } + }, + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + }, + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + serializedName: "partitionLowerBound", + type: { + name: "any" + } + } + } + } +}; + +export const TeradataPartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TeradataPartitionSettings", + modelProperties: { + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + }, + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + 
serializedName: "partitionLowerBound", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbCursorMethodsProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "MongoDbCursorMethodsProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + project: { + serializedName: "project", + type: { + name: "any" + } + }, + sort: { + serializedName: "sort", + type: { + name: "any" + } + }, + skip: { + serializedName: "skip", + type: { + name: "any" + } + }, + limit: { + serializedName: "limit", + type: { + name: "any" + } + } + } + } +}; + +export const NetezzaPartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NetezzaPartitionSettings", + modelProperties: { + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + }, + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + serializedName: "partitionLowerBound", + type: { + name: "any" + } + } + } + } +}; + +export const RedshiftUnloadSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RedshiftUnloadSettings", + modelProperties: { + s3LinkedServiceName: { + serializedName: "s3LinkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + bucketName: { + serializedName: "bucketName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const PolybaseSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PolybaseSettings", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + rejectType: { + serializedName: "rejectType", + type: { + name: "String" + } + }, + rejectValue: { + serializedName: "rejectValue", + type: { + name: "any" + } + }, + rejectSampleValue: { + serializedName: "rejectSampleValue", + type: { + name: "any" + } + }, + useTypeDefault: { + serializedName: 
"useTypeDefault", + type: { + name: "any" + } + } + } + } +}; + +export const DWCopyCommandSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DWCopyCommandSettings", + modelProperties: { + defaultValues: { + serializedName: "defaultValues", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DWCopyCommandDefaultValue" + } + } + } + }, + additionalOptions: { + serializedName: "additionalOptions", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; + +export const DWCopyCommandDefaultValue: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DWCopyCommandDefaultValue", + modelProperties: { + columnName: { + serializedName: "columnName", + type: { + name: "any" + } + }, + defaultValue: { + serializedName: "defaultValue", + type: { + name: "any" + } + } + } + } +}; + +export const LogStorageSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LogStorageSettings", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + path: { + serializedName: "path", + type: { + name: "any" + } + } + } + } +}; + +export const CopyTranslator: coreHttp.CompositeMapper = { + serializedName: "CopyTranslator", + type: { + name: "Composite", + className: "CopyTranslator", + uberParent: "CopyTranslator", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SsisPackageLocation: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisPackageLocation", + modelProperties: { + packagePath: { + serializedName: "packagePath", + type: { + name: 
"any" + } + }, + type: { + serializedName: "type", + type: { + name: "String" + } + }, + packagePassword: { + serializedName: "typeProperties.packagePassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessCredential: { + serializedName: "typeProperties.accessCredential", + type: { + name: "Composite", + className: "SsisAccessCredential" + } + }, + configurationPath: { + serializedName: "typeProperties.configurationPath", + type: { + name: "any" + } + }, + packageName: { + serializedName: "typeProperties.packageName", + type: { + name: "String" + } + }, + packageContent: { + serializedName: "typeProperties.packageContent", + type: { + name: "any" + } + }, + packageLastModifiedDate: { + serializedName: "typeProperties.packageLastModifiedDate", + type: { + name: "String" + } + }, + childPackages: { + serializedName: "typeProperties.childPackages", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SsisChildPackage" + } + } + } + } + } + } +}; + +export const SsisAccessCredential: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisAccessCredential", + modelProperties: { + domain: { + serializedName: "domain", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "userName", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecretBase" + } + } + } + } +}; + +export const SsisChildPackage: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisChildPackage", + modelProperties: { + packagePath: { + serializedName: "packagePath", + required: true, + type: { + name: "any" + } + }, + packageName: { + serializedName: "packageName", + type: { + name: "String" + } + }, + packageContent: { + serializedName: "packageContent", + required: true, + type: { + name: "any" + } + }, + packageLastModifiedDate: { + serializedName: "packageLastModifiedDate", 
+ type: { + name: "String" + } + } + } + } +}; + +export const SsisExecutionCredential: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisExecutionCredential", + modelProperties: { + domain: { + serializedName: "domain", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "userName", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecureString" + } + } + } + } +}; + +export const SsisExecutionParameter: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisExecutionParameter", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const SsisPropertyOverride: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisPropertyOverride", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "any" + } + }, + isSensitive: { + serializedName: "isSensitive", + type: { + name: "Boolean" + } + } + } + } +}; + +export const SsisLogLocation: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisLogLocation", + modelProperties: { + logPath: { + serializedName: "logPath", + required: true, + type: { + name: "any" + } + }, + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + accessCredential: { + serializedName: "typeProperties.accessCredential", + type: { + name: "Composite", + className: "SsisAccessCredential" + } + }, + logRefreshInterval: { + serializedName: "typeProperties.logRefreshInterval", + type: { + name: "any" + } + } + } + } +}; + +export const CustomActivityReferenceObject: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CustomActivityReferenceObject", + modelProperties: { + linkedServices: { + serializedName: "linkedServices", + type: { + name: "Sequence", + element: { + 
type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + datasets: { + serializedName: "datasets", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } + } + } +}; + +export const WebActivityAuthentication: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WebActivityAuthentication", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + pfx: { + serializedName: "pfx", + type: { + name: "Composite", + className: "SecretBase" + } + }, + username: { + serializedName: "username", + type: { + name: "String" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + resource: { + serializedName: "resource", + type: { + name: "String" + } + } + } + } +}; + +export const SwitchCase: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SwitchCase", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "String" + } + }, + activities: { + serializedName: "activities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const AzureMLWebServiceFile: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AzureMLWebServiceFile", + modelProperties: { + filePath: { + serializedName: "filePath", + required: true, + type: { + name: "any" + } + }, + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + } +}; + +export const ExecuteDataFlowActivityTypePropertiesCompute: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ExecuteDataFlowActivityTypePropertiesCompute", + modelProperties: { + computeType: { + serializedName: "computeType", + type: { + name: "String" + } + }, + coreCount: { + 
serializedName: "coreCount", + type: { + name: "Number" + } + } + } + } +}; + +export const ScheduleTriggerRecurrence: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ScheduleTriggerRecurrence", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + frequency: { + serializedName: "frequency", + type: { + name: "String" + } + }, + interval: { + serializedName: "interval", + type: { + name: "Number" + } + }, + startTime: { + serializedName: "startTime", + type: { + name: "DateTime" + } + }, + endTime: { + serializedName: "endTime", + type: { + name: "DateTime" + } + }, + timeZone: { + serializedName: "timeZone", + type: { + name: "String" + } + }, + schedule: { + serializedName: "schedule", + type: { + name: "Composite", + className: "RecurrenceSchedule" + } + } + } + } +}; + +export const RecurrenceSchedule: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RecurrenceSchedule", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + minutes: { + serializedName: "minutes", + type: { + name: "Sequence", + element: { + type: { + name: "Number" + } + } + } + }, + hours: { + serializedName: "hours", + type: { + name: "Sequence", + element: { + type: { + name: "Number" + } + } + } + }, + weekDays: { + serializedName: "weekDays", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "Sunday", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday" + ] + } + } + } + }, + monthDays: { + serializedName: "monthDays", + type: { + name: "Sequence", + element: { + type: { + name: "Number" + } + } + } + }, + monthlyOccurrences: { + serializedName: "monthlyOccurrences", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RecurrenceScheduleOccurrence" + } + } + } + } + } + } +}; + +export const RecurrenceScheduleOccurrence: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: 
"RecurrenceScheduleOccurrence", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + day: { + serializedName: "day", + type: { + name: "Enum", + allowedValues: [ + "Sunday", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday" + ] + } + }, + occurrence: { + serializedName: "occurrence", + type: { + name: "Number" + } + } + } + } +}; + +export const RetryPolicy: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RetryPolicy", + modelProperties: { + count: { + serializedName: "count", + type: { + name: "any" + } + }, + intervalInSeconds: { + constraints: { + InclusiveMaximum: 86400, + InclusiveMinimum: 30 + }, + serializedName: "intervalInSeconds", + type: { + name: "Number" + } + } + } + } +}; + +export const DependencyReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DependencyReference", + uberParent: "DependencyReference", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const TriggerReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const IntegrationRuntimeComputeProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeComputeProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + location: { + serializedName: "location", + type: { + name: "String" + } + }, + nodeSize: { + serializedName: "nodeSize", + type: { + name: "String" + } + }, + numberOfNodes: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: 
"numberOfNodes", + type: { + name: "Number" + } + }, + maxParallelExecutionsPerNode: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxParallelExecutionsPerNode", + type: { + name: "Number" + } + }, + dataFlowProperties: { + serializedName: "dataFlowProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeDataFlowProperties" + } + }, + vNetProperties: { + serializedName: "vNetProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeVNetProperties" + } + } + } + } +}; + +export const IntegrationRuntimeDataFlowProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeDataFlowProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + computeType: { + serializedName: "computeType", + type: { + name: "String" + } + }, + coreCount: { + serializedName: "coreCount", + type: { + name: "Number" + } + }, + timeToLive: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeToLive", + type: { + name: "Number" + } + } + } + } +}; + +export const IntegrationRuntimeVNetProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeVNetProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + vNetId: { + serializedName: "vNetId", + type: { + name: "String" + } + }, + subnet: { + serializedName: "subnet", + type: { + name: "String" + } + }, + publicIPs: { + serializedName: "publicIPs", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const IntegrationRuntimeSsisProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeSsisProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + catalogInfo: { + serializedName: "catalogInfo", + type: { + name: "Composite", + className: "IntegrationRuntimeSsisCatalogInfo" + } + }, + licenseType: { + 
serializedName: "licenseType", + type: { + name: "String" + } + }, + customSetupScriptProperties: { + serializedName: "customSetupScriptProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeCustomSetupScriptProperties" + } + }, + dataProxyProperties: { + serializedName: "dataProxyProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeDataProxyProperties" + } + }, + edition: { + serializedName: "edition", + type: { + name: "String" + } + }, + expressCustomSetupProperties: { + serializedName: "expressCustomSetupProperties", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CustomSetupBase" + } + } + } + } + } + } +}; + +export const IntegrationRuntimeSsisCatalogInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeSsisCatalogInfo", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + catalogServerEndpoint: { + serializedName: "catalogServerEndpoint", + type: { + name: "String" + } + }, + catalogAdminUserName: { + constraints: { + MaxLength: 128, + MinLength: 1 + }, + serializedName: "catalogAdminUserName", + type: { + name: "String" + } + }, + catalogAdminPassword: { + serializedName: "catalogAdminPassword", + type: { + name: "Composite", + className: "SecureString" + } + }, + catalogPricingTier: { + serializedName: "catalogPricingTier", + type: { + name: "String" + } + } + } + } +}; + +export const IntegrationRuntimeCustomSetupScriptProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeCustomSetupScriptProperties", + modelProperties: { + blobContainerUri: { + serializedName: "blobContainerUri", + type: { + name: "String" + } + }, + sasToken: { + serializedName: "sasToken", + type: { + name: "Composite", + className: "SecureString" + } + } + } + } +}; + +export const IntegrationRuntimeDataProxyProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + 
className: "IntegrationRuntimeDataProxyProperties", + modelProperties: { + connectVia: { + serializedName: "connectVia", + type: { + name: "Composite", + className: "EntityReference" + } + }, + stagingLinkedService: { + serializedName: "stagingLinkedService", + type: { + name: "Composite", + className: "EntityReference" + } + }, + path: { + serializedName: "path", + type: { + name: "String" + } + } + } + } +}; + +export const EntityReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "EntityReference", + modelProperties: { + type: { + serializedName: "type", + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + type: { + name: "String" + } + } + } + } +}; + +export const CustomSetupBase: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CustomSetupBase", + uberParent: "CustomSetupBase", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const LinkedIntegrationRuntimeType: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedIntegrationRuntimeType", + uberParent: "LinkedIntegrationRuntimeType", + polymorphicDiscriminator: { + serializedName: "authorizationType", + clientName: "authorizationType" + }, + modelProperties: { + authorizationType: { + serializedName: "authorizationType", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const AzureStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureStorage", + type: { + name: "Composite", + className: "AzureStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: 
"typeProperties.connectionString", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + sasUri: { + serializedName: "typeProperties.sasUri", + type: { + name: "any" + } + }, + sasToken: { + serializedName: "typeProperties.sasToken", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "String" + } + } + } + } +}; + +export const AzureBlobStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureBlobStorage", + type: { + name: "Composite", + className: "AzureBlobStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + sasUri: { + serializedName: "typeProperties.sasUri", + type: { + name: "any" + } + }, + sasToken: { + serializedName: "typeProperties.sasToken", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + serviceEndpoint: { + serializedName: "typeProperties.serviceEndpoint", + type: { + name: "String" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: 
"typeProperties.encryptedCredential", + type: { + name: "String" + } + } + } + } +}; + +export const AzureTableStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureTableStorage", + type: { + name: "Composite", + className: "AzureTableStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + sasUri: { + serializedName: "typeProperties.sasUri", + type: { + name: "any" + } + }, + sasToken: { + serializedName: "typeProperties.sasToken", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "String" + } + } + } + } +}; + +export const AzureSqlDWLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureSqlDW", + type: { + name: "Composite", + className: "AzureSqlDWLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", 
+ className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SqlServerLinkedService: coreHttp.CompositeMapper = { + serializedName: "SqlServer", + type: { + name: "Composite", + className: "SqlServerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlDatabaseLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureSqlDatabase", + type: { + name: "Composite", + className: "AzureSqlDatabaseLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: 
"Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlMILinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureSqlMI", + type: { + name: "Composite", + className: "AzureSqlMILinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBatchLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureBatch", + type: { + name: "Composite", + className: "AzureBatchLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + accountName: { + serializedName: "typeProperties.accountName", + required: true, + type: { + name: "any" + } + }, + accessKey: { + serializedName: "typeProperties.accessKey", + type: { + name: "Composite", + 
className: "SecretBase" + } + }, + batchUri: { + serializedName: "typeProperties.batchUri", + required: true, + type: { + name: "any" + } + }, + poolName: { + serializedName: "typeProperties.poolName", + required: true, + type: { + name: "any" + } + }, + linkedServiceName: { + serializedName: "typeProperties.linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureKeyVaultLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureKeyVault", + type: { + name: "Composite", + className: "AzureKeyVaultLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + baseUrl: { + serializedName: "typeProperties.baseUrl", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbLinkedService: coreHttp.CompositeMapper = { + serializedName: "CosmosDb", + type: { + name: "Composite", + className: "CosmosDbLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + accountEndpoint: { + serializedName: "typeProperties.accountEndpoint", + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } 
+}; + +export const DynamicsLinkedService: coreHttp.CompositeMapper = { + serializedName: "Dynamics", + type: { + name: "Composite", + className: "DynamicsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + deploymentType: { + serializedName: "typeProperties.deploymentType", + required: true, + type: { + name: "String" + } + }, + hostName: { + serializedName: "typeProperties.hostName", + type: { + name: "String" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "String" + } + }, + serviceUri: { + serializedName: "typeProperties.serviceUri", + type: { + name: "String" + } + }, + organizationName: { + serializedName: "typeProperties.organizationName", + type: { + name: "String" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalCredentialType: { + serializedName: "typeProperties.servicePrincipalCredentialType", + type: { + name: "String" + } + }, + servicePrincipalCredential: { + serializedName: "typeProperties.servicePrincipalCredential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsCrmLinkedService: coreHttp.CompositeMapper = { + serializedName: "DynamicsCrm", + type: { + name: "Composite", + className: "DynamicsCrmLinkedService", + uberParent: 
"LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + deploymentType: { + serializedName: "typeProperties.deploymentType", + required: true, + type: { + name: "String" + } + }, + hostName: { + serializedName: "typeProperties.hostName", + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + serviceUri: { + serializedName: "typeProperties.serviceUri", + type: { + name: "any" + } + }, + organizationName: { + serializedName: "typeProperties.organizationName", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalCredentialType: { + serializedName: "typeProperties.servicePrincipalCredentialType", + type: { + name: "String" + } + }, + servicePrincipalCredential: { + serializedName: "typeProperties.servicePrincipalCredential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const CommonDataServiceForAppsLinkedService: coreHttp.CompositeMapper = { + serializedName: "CommonDataServiceForApps", + type: { + name: "Composite", + className: "CommonDataServiceForAppsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + 
...LinkedService.type.modelProperties, + deploymentType: { + serializedName: "typeProperties.deploymentType", + required: true, + type: { + name: "String" + } + }, + hostName: { + serializedName: "typeProperties.hostName", + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + serviceUri: { + serializedName: "typeProperties.serviceUri", + type: { + name: "any" + } + }, + organizationName: { + serializedName: "typeProperties.organizationName", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalCredentialType: { + serializedName: "typeProperties.servicePrincipalCredentialType", + type: { + name: "String" + } + }, + servicePrincipalCredential: { + serializedName: "typeProperties.servicePrincipalCredential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HDInsightLinkedService: coreHttp.CompositeMapper = { + serializedName: "HDInsight", + type: { + name: "Composite", + className: "HDInsightLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clusterUri: { + serializedName: "typeProperties.clusterUri", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + 
type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + linkedServiceName: { + serializedName: "typeProperties.linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + hcatalogLinkedServiceName: { + serializedName: "typeProperties.hcatalogLinkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + isEspEnabled: { + serializedName: "typeProperties.isEspEnabled", + type: { + name: "any" + } + }, + fileSystem: { + serializedName: "typeProperties.fileSystem", + type: { + name: "any" + } + } + } + } +}; + +export const FileServerLinkedService: coreHttp.CompositeMapper = { + serializedName: "FileServer", + type: { + name: "Composite", + className: "FileServerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + userId: { + serializedName: "typeProperties.userId", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureFileStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureFileStorage", + type: { + name: "Composite", + className: "AzureFileStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + 
modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + userId: { + serializedName: "typeProperties.userId", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleCloudStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "GoogleCloudStorage", + type: { + name: "Composite", + className: "GoogleCloudStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + accessKeyId: { + serializedName: "typeProperties.accessKeyId", + type: { + name: "any" + } + }, + secretAccessKey: { + serializedName: "typeProperties.secretAccessKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + serviceUrl: { + serializedName: "typeProperties.serviceUrl", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const OracleLinkedService: coreHttp.CompositeMapper = { + serializedName: "Oracle", + type: { + name: "Composite", + className: "OracleLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: 
"AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMySqlLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureMySql", + type: { + name: "Composite", + className: "AzureMySqlLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MySqlLinkedService: coreHttp.CompositeMapper = { + serializedName: "MySql", + type: { + name: "Composite", + className: "MySqlLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const PostgreSqlLinkedService: coreHttp.CompositeMapper = { + serializedName: "PostgreSql", + type: { + name: "Composite", + className: "PostgreSqlLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SybaseLinkedService: coreHttp.CompositeMapper = { + serializedName: "Sybase", + type: { + name: "Composite", + className: "SybaseLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + }, + schema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const Db2LinkedService: coreHttp.CompositeMapper = { + serializedName: "Db2", + type: { + name: "Composite", + className: "Db2LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { 
+ ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + packageCollection: { + serializedName: "typeProperties.packageCollection", + type: { + name: "any" + } + }, + certificateCommonName: { + serializedName: "typeProperties.certificateCommonName", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const TeradataLinkedService: coreHttp.CompositeMapper = { + serializedName: "Teradata", + type: { + name: "Composite", + className: "TeradataLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + server: { + serializedName: "typeProperties.server", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const 
AzureMLLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureML", + type: { + name: "Composite", + className: "AzureMLLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + mlEndpoint: { + serializedName: "typeProperties.mlEndpoint", + required: true, + type: { + name: "any" + } + }, + apiKey: { + serializedName: "typeProperties.apiKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + updateResourceEndpoint: { + serializedName: "typeProperties.updateResourceEndpoint", + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMLServiceLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureMLService", + type: { + name: "Composite", + className: "AzureMLServiceLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + subscriptionId: { + serializedName: "typeProperties.subscriptionId", + required: true, + type: { + name: "any" + } + }, + resourceGroupName: { + serializedName: "typeProperties.resourceGroupName", + required: true, + type: { + name: "any" + } + }, + mlWorkspaceName: { + serializedName: "typeProperties.mlWorkspaceName", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + 
serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const OdbcLinkedService: coreHttp.CompositeMapper = { + serializedName: "Odbc", + type: { + name: "Composite", + className: "OdbcLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + credential: { + serializedName: "typeProperties.credential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const InformixLinkedService: coreHttp.CompositeMapper = { + serializedName: "Informix", + type: { + name: "Composite", + className: "InformixLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + 
required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + credential: { + serializedName: "typeProperties.credential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MicrosoftAccessLinkedService: coreHttp.CompositeMapper = { + serializedName: "MicrosoftAccess", + type: { + name: "Composite", + className: "MicrosoftAccessLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + credential: { + serializedName: "typeProperties.credential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HdfsLinkedService: coreHttp.CompositeMapper = { + serializedName: "Hdfs", + type: { + name: "Composite", + className: "HdfsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + } + } + } +}; + +export const ODataLinkedService: coreHttp.CompositeMapper = { + serializedName: "OData", + type: { + name: "Composite", + className: "ODataLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + aadResourceId: { + serializedName: "typeProperties.aadResourceId", + type: { + name: "any" + } + }, + aadServicePrincipalCredentialType: { + serializedName: "typeProperties.aadServicePrincipalCredentialType", + type: { + name: "String" + } + }, + servicePrincipalKey: { + serializedName: 
"typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalEmbeddedCert: { + serializedName: "typeProperties.servicePrincipalEmbeddedCert", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalEmbeddedCertPassword: { + serializedName: "typeProperties.servicePrincipalEmbeddedCertPassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const WebLinkedService: coreHttp.CompositeMapper = { + serializedName: "Web", + type: { + name: "Composite", + className: "WebLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + typeProperties: { + serializedName: "typeProperties", + type: { + name: "Composite", + className: "WebLinkedServiceTypeProperties" + } + } + } + } +}; + +export const CassandraLinkedService: coreHttp.CompositeMapper = { + serializedName: "Cassandra", + type: { + name: "Composite", + className: "CassandraLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } 
+ }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbLinkedService: coreHttp.CompositeMapper = { + serializedName: "MongoDb", + type: { + name: "Composite", + className: "MongoDbLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + databaseName: { + serializedName: "typeProperties.databaseName", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + authSource: { + serializedName: "typeProperties.authSource", + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbV2LinkedService: coreHttp.CompositeMapper = { + serializedName: "MongoDbV2", + type: { + name: "Composite", + className: "MongoDbV2LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + 
connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiLinkedService: coreHttp.CompositeMapper = { + serializedName: "CosmosDbMongoDbApi", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStore", + type: { + name: "Composite", + className: "AzureDataLakeStoreLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + dataLakeStoreUri: { + serializedName: "typeProperties.dataLakeStoreUri", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + accountName: { + serializedName: "typeProperties.accountName", + type: { + name: "any" + } + }, + subscriptionId: { + serializedName: "typeProperties.subscriptionId", + type: { + name: "any" + } + 
}, + resourceGroupName: { + serializedName: "typeProperties.resourceGroupName", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFS", + type: { + name: "Composite", + className: "AzureBlobFSLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const Office365LinkedService: coreHttp.CompositeMapper = { + serializedName: "Office365", + type: { + name: "Composite", + className: "Office365LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + office365TenantId: { + serializedName: "typeProperties.office365TenantId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalTenantId: { + serializedName: "typeProperties.servicePrincipalTenantId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + 
serializedName: "typeProperties.servicePrincipalId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceLinkedService: coreHttp.CompositeMapper = { + serializedName: "Salesforce", + type: { + name: "Composite", + className: "SalesforceLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + environmentUrl: { + serializedName: "typeProperties.environmentUrl", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + securityToken: { + serializedName: "typeProperties.securityToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceServiceCloudLinkedService: coreHttp.CompositeMapper = { + serializedName: "SalesforceServiceCloud", + type: { + name: "Composite", + className: "SalesforceServiceCloudLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + environmentUrl: { + serializedName: "typeProperties.environmentUrl", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + 
serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + securityToken: { + serializedName: "typeProperties.securityToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + extendedProperties: { + serializedName: "typeProperties.extendedProperties", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SapCloudForCustomerLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapCloudForCustomer", + type: { + name: "Composite", + className: "SapCloudForCustomerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SapEccLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapEcc", + type: { + name: "Composite", + className: "SapEccLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "String" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: 
"Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "String" + } + } + } + } +}; + +export const SapOpenHubLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapOpenHub", + type: { + name: "Composite", + className: "SapOpenHubLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + systemNumber: { + serializedName: "typeProperties.systemNumber", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + language: { + serializedName: "typeProperties.language", + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const RestServiceLinkedService: coreHttp.CompositeMapper = { + serializedName: "RestService", + type: { + name: "Composite", + className: "RestServiceLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + enableServerCertificateValidation: { + serializedName: "typeProperties.enableServerCertificateValidation", + type: { + name: "any" + } + }, + authenticationType: { 
+ serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + aadResourceId: { + serializedName: "typeProperties.aadResourceId", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonS3LinkedService: coreHttp.CompositeMapper = { + serializedName: "AmazonS3", + type: { + name: "Composite", + className: "AmazonS3LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + accessKeyId: { + serializedName: "typeProperties.accessKeyId", + type: { + name: "any" + } + }, + secretAccessKey: { + serializedName: "typeProperties.secretAccessKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + serviceUrl: { + serializedName: "typeProperties.serviceUrl", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonRedshiftLinkedService: coreHttp.CompositeMapper = { + serializedName: "AmazonRedshift", + type: { + name: "Composite", + className: "AmazonRedshiftLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { 
name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const CustomDataSourceLinkedService: coreHttp.CompositeMapper = { + serializedName: "CustomDataSource", + type: { + name: "Composite", + className: "CustomDataSourceLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + typeProperties: { + serializedName: "typeProperties", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureSearchLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureSearch", + type: { + name: "Composite", + className: "AzureSearchLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + key: { + serializedName: "typeProperties.key", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: 
"typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HttpLinkedService: coreHttp.CompositeMapper = { + serializedName: "HttpServer", + type: { + name: "Composite", + className: "HttpLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + embeddedCertData: { + serializedName: "typeProperties.embeddedCertData", + type: { + name: "any" + } + }, + certThumbprint: { + serializedName: "typeProperties.certThumbprint", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + enableServerCertificateValidation: { + serializedName: "typeProperties.enableServerCertificateValidation", + type: { + name: "any" + } + } + } + } +}; + +export const FtpServerLinkedService: coreHttp.CompositeMapper = { + serializedName: "FtpServer", + type: { + name: "Composite", + className: "FtpServerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: 
"typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + enableServerCertificateValidation: { + serializedName: "typeProperties.enableServerCertificateValidation", + type: { + name: "any" + } + } + } + } +}; + +export const SftpServerLinkedService: coreHttp.CompositeMapper = { + serializedName: "Sftp", + type: { + name: "Composite", + className: "SftpServerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + privateKeyPath: { + serializedName: "typeProperties.privateKeyPath", + type: { + name: "any" + } + }, + privateKeyContent: { + serializedName: "typeProperties.privateKeyContent", + type: { + name: "Composite", + className: "SecretBase" + } + }, + passPhrase: { + serializedName: "typeProperties.passPhrase", + type: { + name: 
"Composite", + className: "SecretBase" + } + }, + skipHostKeyValidation: { + serializedName: "typeProperties.skipHostKeyValidation", + type: { + name: "any" + } + }, + hostKeyFingerprint: { + serializedName: "typeProperties.hostKeyFingerprint", + type: { + name: "any" + } + } + } + } +}; + +export const SapBWLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapBW", + type: { + name: "Composite", + className: "SapBWLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + systemNumber: { + serializedName: "typeProperties.systemNumber", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SapHanaLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapHana", + type: { + name: "Composite", + className: "SapHanaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: 
"typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonMWSLinkedService: coreHttp.CompositeMapper = { + serializedName: "AmazonMWS", + type: { + name: "Composite", + className: "AmazonMWSLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + marketplaceID: { + serializedName: "typeProperties.marketplaceID", + required: true, + type: { + name: "any" + } + }, + sellerID: { + serializedName: "typeProperties.sellerID", + required: true, + type: { + name: "any" + } + }, + mwsAuthToken: { + serializedName: "typeProperties.mwsAuthToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessKeyId: { + serializedName: "typeProperties.accessKeyId", + required: true, + type: { + name: "any" + } + }, + secretKey: { + serializedName: "typeProperties.secretKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + 
+export const AzurePostgreSqlLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzurePostgreSql", + type: { + name: "Composite", + className: "AzurePostgreSqlLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ConcurLinkedService: coreHttp.CompositeMapper = { + serializedName: "Concur", + type: { + name: "Composite", + className: "ConcurLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const 
CouchbaseLinkedService: coreHttp.CompositeMapper = { + serializedName: "Couchbase", + type: { + name: "Composite", + className: "CouchbaseLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + credString: { + serializedName: "typeProperties.credString", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const DrillLinkedService: coreHttp.CompositeMapper = { + serializedName: "Drill", + type: { + name: "Composite", + className: "DrillLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const EloquaLinkedService: coreHttp.CompositeMapper = { + serializedName: "Eloqua", + type: { + name: "Composite", + className: "EloquaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + 
username: { + serializedName: "typeProperties.username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleBigQueryLinkedService: coreHttp.CompositeMapper = { + serializedName: "GoogleBigQuery", + type: { + name: "Composite", + className: "GoogleBigQueryLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + project: { + serializedName: "typeProperties.project", + required: true, + type: { + name: "any" + } + }, + additionalProjects: { + serializedName: "typeProperties.additionalProjects", + type: { + name: "any" + } + }, + requestGoogleDriveScope: { + serializedName: "typeProperties.requestGoogleDriveScope", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + refreshToken: { + serializedName: "typeProperties.refreshToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + email: { + serializedName: "typeProperties.email", + 
type: { + name: "any" + } + }, + keyFilePath: { + serializedName: "typeProperties.keyFilePath", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const GreenplumLinkedService: coreHttp.CompositeMapper = { + serializedName: "Greenplum", + type: { + name: "Composite", + className: "GreenplumLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HBaseLinkedService: coreHttp.CompositeMapper = { + serializedName: "HBase", + type: { + name: "Composite", + className: "HBaseLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + 
name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HiveLinkedService: coreHttp.CompositeMapper = { + serializedName: "Hive", + type: { + name: "Composite", + className: "HiveLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + serverType: { + serializedName: "typeProperties.serverType", + type: { + name: "String" + } + }, + thriftTransportProtocol: { + serializedName: "typeProperties.thriftTransportProtocol", + type: { + name: "String" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + serviceDiscoveryMode: { + serializedName: "typeProperties.serviceDiscoveryMode", + type: { + name: "any" + } + }, + zooKeeperNameSpace: { + serializedName: "typeProperties.zooKeeperNameSpace", + type: { + name: "any" + } + }, + useNativeQuery: { + serializedName: 
"typeProperties.useNativeQuery", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HubspotLinkedService: coreHttp.CompositeMapper = { + serializedName: "Hubspot", + type: { + name: "Composite", + className: "HubspotLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + refreshToken: { + serializedName: "typeProperties.refreshToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: 
"typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ImpalaLinkedService: coreHttp.CompositeMapper = { + serializedName: "Impala", + type: { + name: "Composite", + className: "ImpalaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + 
name: "any" + } + } + } + } +}; + +export const JiraLinkedService: coreHttp.CompositeMapper = { + serializedName: "Jira", + type: { + name: "Composite", + className: "JiraLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MagentoLinkedService: coreHttp.CompositeMapper = { + serializedName: "Magento", + type: { + name: "Composite", + className: "MagentoLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: 
"typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MariaDBLinkedService: coreHttp.CompositeMapper = { + serializedName: "MariaDB", + type: { + name: "Composite", + className: "MariaDBLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMariaDBLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureMariaDB", + type: { + name: "Composite", + className: "AzureMariaDBLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MarketoLinkedService: 
coreHttp.CompositeMapper = { + serializedName: "Marketo", + type: { + name: "Composite", + className: "MarketoLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const PaypalLinkedService: coreHttp.CompositeMapper = { + serializedName: "Paypal", + type: { + name: "Composite", + className: "PaypalLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: 
"any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const PhoenixLinkedService: coreHttp.CompositeMapper = { + serializedName: "Phoenix", + type: { + name: "Composite", + className: "PhoenixLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: 
"typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const PrestoLinkedService: coreHttp.CompositeMapper = { + serializedName: "Presto", + type: { + name: "Composite", + className: "PrestoLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + serverVersion: { + serializedName: "typeProperties.serverVersion", + required: true, + type: { + name: "any" + } + }, + catalog: { + serializedName: "typeProperties.catalog", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + timeZoneID: { + serializedName: "typeProperties.timeZoneID", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + 
} + } +}; + +export const QuickBooksLinkedService: coreHttp.CompositeMapper = { + serializedName: "QuickBooks", + type: { + name: "Composite", + className: "QuickBooksLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + companyId: { + serializedName: "typeProperties.companyId", + required: true, + type: { + name: "any" + } + }, + consumerKey: { + serializedName: "typeProperties.consumerKey", + required: true, + type: { + name: "any" + } + }, + consumerSecret: { + serializedName: "typeProperties.consumerSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessTokenSecret: { + serializedName: "typeProperties.accessTokenSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ServiceNowLinkedService: coreHttp.CompositeMapper = { + serializedName: "ServiceNow", + type: { + name: "Composite", + className: "ServiceNowLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + 
type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ShopifyLinkedService: coreHttp.CompositeMapper = { + serializedName: "Shopify", + type: { + name: "Composite", + className: "ShopifyLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: 
{ + name: "any" + } + } + } + } +}; + +export const SparkLinkedService: coreHttp.CompositeMapper = { + serializedName: "Spark", + type: { + name: "Composite", + className: "SparkLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + required: true, + type: { + name: "any" + } + }, + serverType: { + serializedName: "typeProperties.serverType", + type: { + name: "String" + } + }, + thriftTransportProtocol: { + serializedName: "typeProperties.thriftTransportProtocol", + type: { + name: "String" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const 
SquareLinkedService: coreHttp.CompositeMapper = { + serializedName: "Square", + type: { + name: "Composite", + className: "SquareLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + redirectUri: { + serializedName: "typeProperties.redirectUri", + required: true, + type: { + name: "any" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const XeroLinkedService: coreHttp.CompositeMapper = { + serializedName: "Xero", + type: { + name: "Composite", + className: "XeroLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + consumerKey: { + serializedName: "typeProperties.consumerKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + privateKey: { + serializedName: "typeProperties.privateKey", + type: { + name: "Composite", 
+ className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ZohoLinkedService: coreHttp.CompositeMapper = { + serializedName: "Zoho", + type: { + name: "Composite", + className: "ZohoLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const VerticaLinkedService: coreHttp.CompositeMapper = { + serializedName: "Vertica", + type: { + name: "Composite", + className: "VerticaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { 
+ serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const NetezzaLinkedService: coreHttp.CompositeMapper = { + serializedName: "Netezza", + type: { + name: "Composite", + className: "NetezzaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceMarketingCloudLinkedService: coreHttp.CompositeMapper = { + serializedName: "SalesforceMarketingCloud", + type: { + name: "Composite", + className: "SalesforceMarketingCloudLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } 
+ }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HDInsightOnDemandLinkedService: coreHttp.CompositeMapper = { + serializedName: "HDInsightOnDemand", + type: { + name: "Composite", + className: "HDInsightOnDemandLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clusterSize: { + serializedName: "typeProperties.clusterSize", + required: true, + type: { + name: "any" + } + }, + timeToLive: { + serializedName: "typeProperties.timeToLive", + required: true, + type: { + name: "any" + } + }, + version: { + serializedName: "typeProperties.version", + required: true, + type: { + name: "any" + } + }, + linkedServiceName: { + serializedName: "typeProperties.linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + hostSubscriptionId: { + serializedName: "typeProperties.hostSubscriptionId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, + type: { + name: "any" + } + }, + clusterResourceGroup: { + serializedName: "typeProperties.clusterResourceGroup", + required: true, + type: { + name: "any" + } + }, + clusterNamePrefix: { + serializedName: "typeProperties.clusterNamePrefix", + type: { + name: "any" + } + }, + clusterUserName: { + serializedName: "typeProperties.clusterUserName", + type: { + name: "any" + } + }, + 
clusterPassword: { + serializedName: "typeProperties.clusterPassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clusterSshUserName: { + serializedName: "typeProperties.clusterSshUserName", + type: { + name: "any" + } + }, + clusterSshPassword: { + serializedName: "typeProperties.clusterSshPassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + additionalLinkedServiceNames: { + serializedName: "typeProperties.additionalLinkedServiceNames", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + hcatalogLinkedServiceName: { + serializedName: "typeProperties.hcatalogLinkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + clusterType: { + serializedName: "typeProperties.clusterType", + type: { + name: "any" + } + }, + sparkVersion: { + serializedName: "typeProperties.sparkVersion", + type: { + name: "any" + } + }, + coreConfiguration: { + serializedName: "typeProperties.coreConfiguration", + type: { + name: "any" + } + }, + hBaseConfiguration: { + serializedName: "typeProperties.hBaseConfiguration", + type: { + name: "any" + } + }, + hdfsConfiguration: { + serializedName: "typeProperties.hdfsConfiguration", + type: { + name: "any" + } + }, + hiveConfiguration: { + serializedName: "typeProperties.hiveConfiguration", + type: { + name: "any" + } + }, + mapReduceConfiguration: { + serializedName: "typeProperties.mapReduceConfiguration", + type: { + name: "any" + } + }, + oozieConfiguration: { + serializedName: "typeProperties.oozieConfiguration", + type: { + name: "any" + } + }, + stormConfiguration: { + serializedName: "typeProperties.stormConfiguration", + type: { + name: "any" + } + }, + yarnConfiguration: { + serializedName: "typeProperties.yarnConfiguration", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + 
}, + headNodeSize: { + serializedName: "typeProperties.headNodeSize", + type: { + name: "any" + } + }, + dataNodeSize: { + serializedName: "typeProperties.dataNodeSize", + type: { + name: "any" + } + }, + zookeeperNodeSize: { + serializedName: "typeProperties.zookeeperNodeSize", + type: { + name: "any" + } + }, + scriptActions: { + serializedName: "typeProperties.scriptActions", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ScriptAction" + } + } + } + }, + virtualNetworkId: { + serializedName: "typeProperties.virtualNetworkId", + type: { + name: "any" + } + }, + subnetName: { + serializedName: "typeProperties.subnetName", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeAnalyticsLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeAnalytics", + type: { + name: "Composite", + className: "AzureDataLakeAnalyticsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + accountName: { + serializedName: "typeProperties.accountName", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, + type: { + name: "any" + } + }, + subscriptionId: { + serializedName: "typeProperties.subscriptionId", + type: { + name: "any" + } + }, + resourceGroupName: { + serializedName: "typeProperties.resourceGroupName", + type: { + name: "any" + } + }, + dataLakeAnalyticsUri: { + serializedName: "typeProperties.dataLakeAnalyticsUri", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: 
"typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDatabricksLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureDatabricks", + type: { + name: "Composite", + className: "AzureDatabricksLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + domain: { + serializedName: "typeProperties.domain", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + existingClusterId: { + serializedName: "typeProperties.existingClusterId", + type: { + name: "any" + } + }, + instancePoolId: { + serializedName: "typeProperties.instancePoolId", + type: { + name: "any" + } + }, + newClusterVersion: { + serializedName: "typeProperties.newClusterVersion", + type: { + name: "any" + } + }, + newClusterNumOfWorker: { + serializedName: "typeProperties.newClusterNumOfWorker", + type: { + name: "any" + } + }, + newClusterNodeType: { + serializedName: "typeProperties.newClusterNodeType", + type: { + name: "any" + } + }, + newClusterSparkConf: { + serializedName: "typeProperties.newClusterSparkConf", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + newClusterSparkEnvVars: { + serializedName: "typeProperties.newClusterSparkEnvVars", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + newClusterCustomTags: { + serializedName: "typeProperties.newClusterCustomTags", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + newClusterDriverNodeType: { + serializedName: "typeProperties.newClusterDriverNodeType", + type: { + name: "any" + } + }, + newClusterInitScripts: { + serializedName: "typeProperties.newClusterInitScripts", + type: { + name: "any" + } + 
}, + newClusterEnableElasticDisk: { + serializedName: "typeProperties.newClusterEnableElasticDisk", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ResponsysLinkedService: coreHttp.CompositeMapper = { + serializedName: "Responsys", + type: { + name: "Composite", + className: "ResponsysLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsAXLinkedService: coreHttp.CompositeMapper = { + serializedName: "DynamicsAX", + type: { + name: "Composite", + className: "DynamicsAXLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + 
serializedName: "typeProperties.servicePrincipalId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, + type: { + name: "any" + } + }, + aadResourceId: { + serializedName: "typeProperties.aadResourceId", + required: true, + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const OracleServiceCloudLinkedService: coreHttp.CompositeMapper = { + serializedName: "OracleServiceCloud", + type: { + name: "Composite", + className: "OracleServiceCloudLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleAdWordsLinkedService: coreHttp.CompositeMapper = { + serializedName: "GoogleAdWords", + type: { + name: "Composite", + className: 
"GoogleAdWordsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clientCustomerID: { + serializedName: "typeProperties.clientCustomerID", + required: true, + type: { + name: "any" + } + }, + developerToken: { + serializedName: "typeProperties.developerToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + refreshToken: { + serializedName: "typeProperties.refreshToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + email: { + serializedName: "typeProperties.email", + type: { + name: "any" + } + }, + keyFilePath: { + serializedName: "typeProperties.keyFilePath", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SapTableLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapTable", + type: { + name: "Composite", + className: "SapTableLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + type: { + name: "any" 
+ } + }, + systemNumber: { + serializedName: "typeProperties.systemNumber", + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: "any" + } + }, + language: { + serializedName: "typeProperties.language", + type: { + name: "any" + } + }, + systemId: { + serializedName: "typeProperties.systemId", + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + messageServer: { + serializedName: "typeProperties.messageServer", + type: { + name: "any" + } + }, + messageServerService: { + serializedName: "typeProperties.messageServerService", + type: { + name: "any" + } + }, + sncMode: { + serializedName: "typeProperties.sncMode", + type: { + name: "any" + } + }, + sncMyName: { + serializedName: "typeProperties.sncMyName", + type: { + name: "any" + } + }, + sncPartnerName: { + serializedName: "typeProperties.sncPartnerName", + type: { + name: "any" + } + }, + sncLibraryPath: { + serializedName: "typeProperties.sncLibraryPath", + type: { + name: "any" + } + }, + sncQop: { + serializedName: "typeProperties.sncQop", + type: { + name: "any" + } + }, + logonGroup: { + serializedName: "typeProperties.logonGroup", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataExplorerLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorer", + type: { + name: "Composite", + className: "AzureDataExplorerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + 
required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureFunctionLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureFunction", + type: { + name: "Composite", + className: "AzureFunctionLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + functionAppUrl: { + serializedName: "typeProperties.functionAppUrl", + required: true, + type: { + name: "any" + } + }, + functionKey: { + serializedName: "typeProperties.functionKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureEntityResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AzureEntityResource", + modelProperties: { + ...Resource.type.modelProperties, + etag: { + serializedName: "etag", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const PrivateEndpointConnection: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PrivateEndpointConnection", + modelProperties: { + ...Resource.type.modelProperties, + privateEndpoint: { + serializedName: "properties.privateEndpoint", + type: { + name: "Composite", + className: "PrivateEndpoint" + } + }, + 
privateLinkServiceConnectionState: { + serializedName: "properties.privateLinkServiceConnectionState", + type: { + name: "Composite", + className: "PrivateLinkServiceConnectionState" + } + }, + provisioningState: { + serializedName: "properties.provisioningState", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const TrackedResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TrackedResource", + modelProperties: { + ...Resource.type.modelProperties, + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + location: { + serializedName: "location", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const ProxyResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ProxyResource", + modelProperties: { + ...Resource.type.modelProperties + } + } +}; + +export const AvroDataset: coreHttp.CompositeMapper = { + serializedName: "Avro", + type: { + name: "Composite", + className: "AvroDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + avroCompressionCodec: { + serializedName: "typeProperties.avroCompressionCodec", + type: { + name: "String" + } + }, + avroCompressionLevel: { + constraints: { + InclusiveMaximum: 9, + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.avroCompressionLevel", + type: { + name: "Number" + } + } + } + } +}; + +export const ParquetDataset: coreHttp.CompositeMapper = { + serializedName: "Parquet", + type: { + name: "Composite", + className: "ParquetDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + compressionCodec: { + serializedName: "typeProperties.compressionCodec", + type: { + name: "String" + } + } + } + } +}; + +export const DelimitedTextDataset: coreHttp.CompositeMapper = { + serializedName: "DelimitedText", + type: { + name: "Composite", + className: "DelimitedTextDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + columnDelimiter: { + serializedName: "typeProperties.columnDelimiter", + type: { + name: "any" + } + }, + rowDelimiter: { + serializedName: "typeProperties.rowDelimiter", + type: { + name: "any" + } + }, + encodingName: { + serializedName: "typeProperties.encodingName", + type: { + name: "any" + } + }, + compressionCodec: { + serializedName: "typeProperties.compressionCodec", + type: { + name: "String" + } + }, + compressionLevel: { + serializedName: "typeProperties.compressionLevel", + type: { + name: "String" + } + }, + quoteChar: { + serializedName: "typeProperties.quoteChar", + type: { + name: "any" + } + }, + escapeChar: { + serializedName: "typeProperties.escapeChar", + type: { + name: "any" + } + }, + firstRowAsHeader: { + serializedName: "typeProperties.firstRowAsHeader", + type: { + name: "any" + } + }, + nullValue: { + serializedName: "typeProperties.nullValue", + type: { + name: "any" + } + } + } + } +}; + +export const JsonDataset: coreHttp.CompositeMapper = { + serializedName: "Json", + type: { + name: "Composite", + className: "JsonDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } 
}, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + encodingName: { + serializedName: "typeProperties.encodingName", + type: { + name: "any" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + +export const OrcDataset: coreHttp.CompositeMapper = { + serializedName: "Orc", + type: { + name: "Composite", + className: "OrcDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + orcCompressionCodec: { + serializedName: "typeProperties.orcCompressionCodec", + type: { + name: "String" + } + } + } + } +}; + +export const BinaryDataset: coreHttp.CompositeMapper = { + serializedName: "Binary", + type: { + name: "Composite", + className: "BinaryDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + +export const AzureTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureTable", + type: { + name: "Composite", + className: "AzureTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureSqlTable", + type: { + name: "Composite", + className: "AzureSqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlMITableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureSqlMITable", + type: { + name: "Composite", + className: "AzureSqlMITableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlDWTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureSqlDWTable", + type: { + name: "Composite", + className: "AzureSqlDWTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + 
serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const CassandraTableDataset: coreHttp.CompositeMapper = { + serializedName: "CassandraTable", + type: { + name: "Composite", + className: "CassandraTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + keyspace: { + serializedName: "typeProperties.keyspace", + type: { + name: "any" + } + } + } + } +}; + +export const CustomDataset: coreHttp.CompositeMapper = { + serializedName: "CustomDataset", + type: { + name: "Composite", + className: "CustomDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + typeProperties: { + serializedName: "typeProperties", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbSqlApiCollectionDataset: coreHttp.CompositeMapper = { + serializedName: "CosmosDbSqlApiCollection", + type: { + name: "Composite", + className: "CosmosDbSqlApiCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collectionName: { + serializedName: "typeProperties.collectionName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const DocumentDbCollectionDataset: coreHttp.CompositeMapper = { + serializedName: "DocumentDbCollection", + type: { + name: "Composite", + 
className: "DocumentDbCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collectionName: { + serializedName: "typeProperties.collectionName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsEntityDataset: coreHttp.CompositeMapper = { + serializedName: "DynamicsEntity", + type: { + name: "Composite", + className: "DynamicsEntityDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + entityName: { + serializedName: "typeProperties.entityName", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsCrmEntityDataset: coreHttp.CompositeMapper = { + serializedName: "DynamicsCrmEntity", + type: { + name: "Composite", + className: "DynamicsCrmEntityDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + entityName: { + serializedName: "typeProperties.entityName", + type: { + name: "any" + } + } + } + } +}; + +export const CommonDataServiceForAppsEntityDataset: coreHttp.CompositeMapper = { + serializedName: "CommonDataServiceForAppsEntity", + type: { + name: "Composite", + className: "CommonDataServiceForAppsEntityDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + entityName: { + serializedName: "typeProperties.entityName", + type: { + name: "any" + } + } + } + } +}; + +export const Office365Dataset: coreHttp.CompositeMapper = { + serializedName: "Office365Table", + type: { + name: 
"Composite", + className: "Office365Dataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + required: true, + type: { + name: "any" + } + }, + predicate: { + serializedName: "typeProperties.predicate", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbCollectionDataset: coreHttp.CompositeMapper = { + serializedName: "MongoDbCollection", + type: { + name: "Composite", + className: "MongoDbCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collectionName: { + serializedName: "typeProperties.collectionName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbV2CollectionDataset: coreHttp.CompositeMapper = { + serializedName: "MongoDbV2Collection", + type: { + name: "Composite", + className: "MongoDbV2CollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collection: { + serializedName: "typeProperties.collection", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiCollectionDataset: coreHttp.CompositeMapper = { + serializedName: "CosmosDbMongoDbApiCollection", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collection: { + serializedName: "typeProperties.collection", + required: true, + 
type: { + name: "any" + } + } + } + } +}; + +export const ODataResourceDataset: coreHttp.CompositeMapper = { + serializedName: "ODataResource", + type: { + name: "Composite", + className: "ODataResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + type: { + name: "any" + } + } + } + } +}; + +export const OracleTableDataset: coreHttp.CompositeMapper = { + serializedName: "OracleTable", + type: { + name: "Composite", + className: "OracleTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const TeradataTableDataset: coreHttp.CompositeMapper = { + serializedName: "TeradataTable", + type: { + name: "Composite", + className: "TeradataTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + database: { + serializedName: "typeProperties.database", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMySqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureMySqlTable", + type: { + name: "Composite", + className: "AzureMySqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonRedshiftTableDataset: coreHttp.CompositeMapper = { + serializedName: "AmazonRedshiftTable", + type: { + name: "Composite", + className: "AmazonRedshiftTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const Db2TableDataset: coreHttp.CompositeMapper = { + serializedName: "Db2Table", + type: { + name: "Composite", + className: "Db2TableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const RelationalTableDataset: coreHttp.CompositeMapper = { + serializedName: "RelationalTable", + type: { + name: "Composite", + className: "RelationalTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { 
+ ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const InformixTableDataset: coreHttp.CompositeMapper = { + serializedName: "InformixTable", + type: { + name: "Composite", + className: "InformixTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const OdbcTableDataset: coreHttp.CompositeMapper = { + serializedName: "OdbcTable", + type: { + name: "Composite", + className: "OdbcTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const MySqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "MySqlTable", + type: { + name: "Composite", + className: "MySqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const PostgreSqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "PostgreSqlTable", + type: { + name: "Composite", + className: "PostgreSqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { 
+ name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const MicrosoftAccessTableDataset: coreHttp.CompositeMapper = { + serializedName: "MicrosoftAccessTable", + type: { + name: "Composite", + className: "MicrosoftAccessTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SalesforceObject", + type: { + name: "Composite", + className: "SalesforceObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + objectApiName: { + serializedName: "typeProperties.objectApiName", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceServiceCloudObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SalesforceServiceCloudObject", + type: { + name: "Composite", + className: "SalesforceServiceCloudObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + objectApiName: { + serializedName: "typeProperties.objectApiName", + type: { + name: "any" + } + } + } + } +}; + +export const SybaseTableDataset: coreHttp.CompositeMapper = { + serializedName: "SybaseTable", + type: { + name: "Composite", + className: "SybaseTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const SapBwCubeDataset: coreHttp.CompositeMapper = { + serializedName: "SapBwCube", + type: { + name: "Composite", + className: "SapBwCubeDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties + } + } +}; + +export const SapCloudForCustomerResourceDataset: coreHttp.CompositeMapper = { + serializedName: "SapCloudForCustomerResource", + type: { + name: "Composite", + className: "SapCloudForCustomerResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const SapEccResourceDataset: coreHttp.CompositeMapper = { + serializedName: "SapEccResource", + type: { + name: "Composite", + className: "SapEccResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const SapHanaTableDataset: coreHttp.CompositeMapper = { + serializedName: "SapHanaTable", + type: { + name: "Composite", + className: "SapHanaTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + 
schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const SapOpenHubTableDataset: coreHttp.CompositeMapper = { + serializedName: "SapOpenHubTable", + type: { + name: "Composite", + className: "SapOpenHubTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + openHubDestinationName: { + serializedName: "typeProperties.openHubDestinationName", + required: true, + type: { + name: "any" + } + }, + excludeLastRequest: { + serializedName: "typeProperties.excludeLastRequest", + type: { + name: "any" + } + }, + baseRequestId: { + serializedName: "typeProperties.baseRequestId", + type: { + name: "any" + } + } + } + } +}; + +export const SqlServerTableDataset: coreHttp.CompositeMapper = { + serializedName: "SqlServerTable", + type: { + name: "Composite", + className: "SqlServerTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const RestResourceDataset: coreHttp.CompositeMapper = { + serializedName: "RestResource", + type: { + name: "Composite", + className: "RestResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + 
relativeUrl: { + serializedName: "typeProperties.relativeUrl", + type: { + name: "any" + } + }, + requestMethod: { + serializedName: "typeProperties.requestMethod", + type: { + name: "any" + } + }, + requestBody: { + serializedName: "typeProperties.requestBody", + type: { + name: "any" + } + }, + additionalHeaders: { + serializedName: "typeProperties.additionalHeaders", + type: { + name: "any" + } + }, + paginationRules: { + serializedName: "typeProperties.paginationRules", + type: { + name: "any" + } + } + } + } +}; + +export const SapTableResourceDataset: coreHttp.CompositeMapper = { + serializedName: "SapTableResource", + type: { + name: "Composite", + className: "SapTableResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const WebTableDataset: coreHttp.CompositeMapper = { + serializedName: "WebTable", + type: { + name: "Composite", + className: "WebTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + index: { + serializedName: "typeProperties.index", + required: true, + type: { + name: "any" + } + }, + path: { + serializedName: "typeProperties.path", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSearchIndexDataset: coreHttp.CompositeMapper = { + serializedName: "AzureSearchIndex", + type: { + name: "Composite", + className: "AzureSearchIndexDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + indexName: { + serializedName: 
"typeProperties.indexName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AmazonMWSObjectDataset: coreHttp.CompositeMapper = { + serializedName: "AmazonMWSObject", + type: { + name: "Composite", + className: "AmazonMWSObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const AzurePostgreSqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzurePostgreSqlTable", + type: { + name: "Composite", + className: "AzurePostgreSqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const ConcurObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ConcurObject", + type: { + name: "Composite", + className: "ConcurObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const CouchbaseTableDataset: coreHttp.CompositeMapper = { + serializedName: "CouchbaseTable", + type: { + name: "Composite", + className: "CouchbaseTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const DrillTableDataset: coreHttp.CompositeMapper = { + serializedName: "DrillTable", + type: { + name: "Composite", + className: "DrillTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const EloquaObjectDataset: coreHttp.CompositeMapper = { + serializedName: "EloquaObject", + type: { + name: "Composite", + className: "EloquaObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleBigQueryObjectDataset: coreHttp.CompositeMapper = { + serializedName: "GoogleBigQueryObject", + type: { + name: "Composite", + className: "GoogleBigQueryObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + dataset: { + serializedName: "typeProperties.dataset", + type: 
{ + name: "any" + } + } + } + } +}; + +export const GreenplumTableDataset: coreHttp.CompositeMapper = { + serializedName: "GreenplumTable", + type: { + name: "Composite", + className: "GreenplumTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const HBaseObjectDataset: coreHttp.CompositeMapper = { + serializedName: "HBaseObject", + type: { + name: "Composite", + className: "HBaseObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const HiveObjectDataset: coreHttp.CompositeMapper = { + serializedName: "HiveObject", + type: { + name: "Composite", + className: "HiveObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const HubspotObjectDataset: coreHttp.CompositeMapper = { + serializedName: "HubspotObject", + type: { + name: "Composite", + className: 
"HubspotObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ImpalaObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ImpalaObject", + type: { + name: "Composite", + className: "ImpalaObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const JiraObjectDataset: coreHttp.CompositeMapper = { + serializedName: "JiraObject", + type: { + name: "Composite", + className: "JiraObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const MagentoObjectDataset: coreHttp.CompositeMapper = { + serializedName: "MagentoObject", + type: { + name: "Composite", + className: "MagentoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const MariaDBTableDataset: 
coreHttp.CompositeMapper = { + serializedName: "MariaDBTable", + type: { + name: "Composite", + className: "MariaDBTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMariaDBTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureMariaDBTable", + type: { + name: "Composite", + className: "AzureMariaDBTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const MarketoObjectDataset: coreHttp.CompositeMapper = { + serializedName: "MarketoObject", + type: { + name: "Composite", + className: "MarketoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const PaypalObjectDataset: coreHttp.CompositeMapper = { + serializedName: "PaypalObject", + type: { + name: "Composite", + className: "PaypalObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const PhoenixObjectDataset: coreHttp.CompositeMapper = { + serializedName: "PhoenixObject", + type: { + name: 
"Composite", + className: "PhoenixObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const PrestoObjectDataset: coreHttp.CompositeMapper = { + serializedName: "PrestoObject", + type: { + name: "Composite", + className: "PrestoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const QuickBooksObjectDataset: coreHttp.CompositeMapper = { + serializedName: "QuickBooksObject", + type: { + name: "Composite", + className: "QuickBooksObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ServiceNowObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ServiceNowObject", + type: { + name: "Composite", + className: "ServiceNowObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ShopifyObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ShopifyObject", + type: { + name: "Composite", + className: "ShopifyObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const SparkObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SparkObject", + type: { + name: "Composite", + className: "SparkObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const SquareObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SquareObject", + type: { + name: "Composite", + className: "SquareObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const XeroObjectDataset: coreHttp.CompositeMapper = { + serializedName: "XeroObject", + type: { + name: "Composite", + className: "XeroObjectDataset", + uberParent: 
"Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ZohoObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ZohoObject", + type: { + name: "Composite", + className: "ZohoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const NetezzaTableDataset: coreHttp.CompositeMapper = { + serializedName: "NetezzaTable", + type: { + name: "Composite", + className: "NetezzaTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const VerticaTableDataset: coreHttp.CompositeMapper = { + serializedName: "VerticaTable", + type: { + name: "Composite", + className: "VerticaTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + 
schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceMarketingCloudObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SalesforceMarketingCloudObject", + type: { + name: "Composite", + className: "SalesforceMarketingCloudObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ResponsysObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ResponsysObject", + type: { + name: "Composite", + className: "ResponsysObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsAXResourceDataset: coreHttp.CompositeMapper = { + serializedName: "DynamicsAXResource", + type: { + name: "Composite", + className: "DynamicsAXResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const OracleServiceCloudObjectDataset: coreHttp.CompositeMapper = { + serializedName: "OracleServiceCloudObject", + type: { + name: "Composite", + className: "OracleServiceCloudObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + 
...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataExplorerTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorerTable", + type: { + name: "Composite", + className: "AzureDataExplorerTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleAdWordsObjectDataset: coreHttp.CompositeMapper = { + serializedName: "GoogleAdWordsObject", + type: { + name: "Composite", + className: "GoogleAdWordsObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ControlActivity: coreHttp.CompositeMapper = { + serializedName: "Container", + type: { + name: "Composite", + className: "ControlActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties + } + } +}; + +export const ExecutionActivity: coreHttp.CompositeMapper = { + serializedName: "Execution", + type: { + name: "Composite", + className: "ExecutionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + ...Activity.type.modelProperties, + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: 
"LinkedServiceReference" + } + }, + policy: { + serializedName: "policy", + type: { + name: "Composite", + className: "ActivityPolicy" + } + } + } + } +}; + +export const ExecutePipelineActivity: coreHttp.CompositeMapper = { + serializedName: "ExecutePipeline", + type: { + name: "Composite", + className: "ExecutePipelineActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + pipeline: { + serializedName: "typeProperties.pipeline", + type: { + name: "Composite", + className: "PipelineReference" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + waitOnCompletion: { + serializedName: "typeProperties.waitOnCompletion", + type: { + name: "Boolean" + } + } + } + } +}; + +export const IfConditionActivity: coreHttp.CompositeMapper = { + serializedName: "IfCondition", + type: { + name: "Composite", + className: "IfConditionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + expression: { + serializedName: "typeProperties.expression", + type: { + name: "Composite", + className: "Expression" + } + }, + ifTrueActivities: { + serializedName: "typeProperties.ifTrueActivities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + }, + ifFalseActivities: { + serializedName: "typeProperties.ifFalseActivities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const SwitchActivity: coreHttp.CompositeMapper = { + serializedName: "Switch", + type: { + name: "Composite", + className: "SwitchActivity", + uberParent: 
"Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + on: { + serializedName: "typeProperties.on", + type: { + name: "Composite", + className: "Expression" + } + }, + cases: { + serializedName: "typeProperties.cases", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SwitchCase" + } + } + } + }, + defaultActivities: { + serializedName: "typeProperties.defaultActivities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const ForEachActivity: coreHttp.CompositeMapper = { + serializedName: "ForEach", + type: { + name: "Composite", + className: "ForEachActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + isSequential: { + serializedName: "typeProperties.isSequential", + type: { + name: "Boolean" + } + }, + batchCount: { + constraints: { + InclusiveMaximum: 50 + }, + serializedName: "typeProperties.batchCount", + type: { + name: "Number" + } + }, + items: { + serializedName: "typeProperties.items", + type: { + name: "Composite", + className: "Expression" + } + }, + activities: { + serializedName: "typeProperties.activities", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const WaitActivity: coreHttp.CompositeMapper = { + serializedName: "Wait", + type: { + name: "Composite", + className: "WaitActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + waitTimeInSeconds: { + 
serializedName: "typeProperties.waitTimeInSeconds", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const UntilActivity: coreHttp.CompositeMapper = { + serializedName: "Until", + type: { + name: "Composite", + className: "UntilActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + expression: { + serializedName: "typeProperties.expression", + type: { + name: "Composite", + className: "Expression" + } + }, + timeout: { + serializedName: "typeProperties.timeout", + type: { + name: "any" + } + }, + activities: { + serializedName: "typeProperties.activities", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const ValidationActivity: coreHttp.CompositeMapper = { + serializedName: "Validation", + type: { + name: "Composite", + className: "ValidationActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + timeout: { + serializedName: "typeProperties.timeout", + type: { + name: "any" + } + }, + sleep: { + serializedName: "typeProperties.sleep", + type: { + name: "any" + } + }, + minimumSize: { + serializedName: "typeProperties.minimumSize", + type: { + name: "any" + } + }, + childItems: { + serializedName: "typeProperties.childItems", + type: { + name: "any" + } + }, + dataset: { + serializedName: "typeProperties.dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } +}; + +export const FilterActivity: coreHttp.CompositeMapper = { + serializedName: "Filter", + type: { + name: "Composite", + className: "FilterActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" 
} }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + items: { + serializedName: "typeProperties.items", + type: { + name: "Composite", + className: "Expression" + } + }, + condition: { + serializedName: "typeProperties.condition", + type: { + name: "Composite", + className: "Expression" + } + } + } + } +}; + +export const SetVariableActivity: coreHttp.CompositeMapper = { + serializedName: "SetVariable", + type: { + name: "Composite", + className: "SetVariableActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + variableName: { + serializedName: "typeProperties.variableName", + type: { + name: "String" + } + }, + value: { + serializedName: "typeProperties.value", + type: { + name: "any" + } + } + } + } +}; + +export const AppendVariableActivity: coreHttp.CompositeMapper = { + serializedName: "AppendVariable", + type: { + name: "Composite", + className: "AppendVariableActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + variableName: { + serializedName: "typeProperties.variableName", + type: { + name: "String" + } + }, + value: { + serializedName: "typeProperties.value", + type: { + name: "any" + } + } + } + } +}; + +export const WebHookActivity: coreHttp.CompositeMapper = { + serializedName: "WebHook", + type: { + name: "Composite", + className: "WebHookActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + method: { + serializedName: "typeProperties.method", + required: true, + type: { + name: "String" + 
} + }, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + timeout: { + serializedName: "typeProperties.timeout", + type: { + name: "String" + } + }, + headers: { + serializedName: "typeProperties.headers", + type: { + name: "any" + } + }, + body: { + serializedName: "typeProperties.body", + type: { + name: "any" + } + }, + authentication: { + serializedName: "typeProperties.authentication", + type: { + name: "Composite", + className: "WebActivityAuthentication" + } + }, + reportStatusOnCallBack: { + serializedName: "typeProperties.reportStatusOnCallBack", + type: { + name: "any" + } + } + } + } +}; + +export const SynapseNotebookActivity: coreHttp.CompositeMapper = { + serializedName: "SynapseNotebook", + type: { + name: "Composite", + className: "SynapseNotebookActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + notebook: { + serializedName: "typeProperties.notebook", + type: { + name: "Composite", + className: "SynapseNotebookReference" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const SynapseSparkJobDefinitionActivity: coreHttp.CompositeMapper = { + serializedName: "SparkJob", + type: { + name: "Composite", + className: "SynapseSparkJobDefinitionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + sparkJob: { + serializedName: "typeProperties.sparkJob", + type: { + name: "Composite", + className: "SynapseSparkJobReference" + } + } + } + } +}; + +export const SqlPoolStoredProcedureActivity: coreHttp.CompositeMapper = { + serializedName: "SqlPoolStoredProcedure", + 
type: { + name: "Composite", + className: "SqlPoolStoredProcedureActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + sqlPool: { + serializedName: "sqlPool", + type: { + name: "Composite", + className: "SqlPoolReference" + } + }, + storedProcedureName: { + serializedName: "typeProperties.storedProcedureName", + required: true, + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "typeProperties.storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + } + } + } +}; + +export const RerunTumblingWindowTrigger: coreHttp.CompositeMapper = { + serializedName: "RerunTumblingWindowTrigger", + type: { + name: "Composite", + className: "RerunTumblingWindowTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...Trigger.type.modelProperties, + parentTrigger: { + serializedName: "typeProperties.parentTrigger", + type: { + name: "any" + } + }, + requestedStartTime: { + serializedName: "typeProperties.requestedStartTime", + required: true, + type: { + name: "DateTime" + } + }, + requestedEndTime: { + serializedName: "typeProperties.requestedEndTime", + required: true, + type: { + name: "DateTime" + } + }, + maxConcurrency: { + constraints: { + InclusiveMaximum: 50, + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.maxConcurrency", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const MultiplePipelineTrigger: coreHttp.CompositeMapper = { + serializedName: "MultiplePipelineTrigger", + type: { + name: "Composite", + className: "MultiplePipelineTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + ...Trigger.type.modelProperties, + pipelines: { + serializedName: "pipelines", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "TriggerPipelineReference" + } + } + } + } + } + } +}; + +export const TumblingWindowTrigger: coreHttp.CompositeMapper = { + serializedName: "TumblingWindowTrigger", + type: { + name: "Composite", + className: "TumblingWindowTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...Trigger.type.modelProperties, + pipeline: { + serializedName: "pipeline", + type: { + name: "Composite", + className: "TriggerPipelineReference" + } + }, + frequency: { + serializedName: "typeProperties.frequency", + required: true, + type: { + name: "String" + } + }, + interval: { + serializedName: "typeProperties.interval", + required: true, + type: { + name: "Number" + } + }, + startTime: { + serializedName: "typeProperties.startTime", + required: true, + type: { + name: "DateTime" + } + }, + endTime: { + serializedName: "typeProperties.endTime", + type: { + name: "DateTime" + } + }, + delay: { + serializedName: "typeProperties.delay", + type: { + name: "any" + } + }, + maxConcurrency: { + constraints: { + InclusiveMaximum: 50, + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.maxConcurrency", + required: true, + type: { + name: "Number" + } + }, + retryPolicy: { + serializedName: "typeProperties.retryPolicy", + type: { + name: "Composite", + className: "RetryPolicy" + } + }, + dependsOn: { + serializedName: "typeProperties.dependsOn", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DependencyReference" + } + } + } + } + } + } +}; + +export const ChainingTrigger: coreHttp.CompositeMapper = { + serializedName: "ChainingTrigger", + type: { + name: 
"Composite", + className: "ChainingTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...Trigger.type.modelProperties, + pipeline: { + serializedName: "pipeline", + type: { + name: "Composite", + className: "TriggerPipelineReference" + } + }, + dependsOn: { + serializedName: "typeProperties.dependsOn", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PipelineReference" + } + } + } + }, + runDimension: { + serializedName: "typeProperties.runDimension", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const MappingDataFlow: coreHttp.CompositeMapper = { + serializedName: "MappingDataFlow", + type: { + name: "Composite", + className: "MappingDataFlow", + uberParent: "DataFlow", + polymorphicDiscriminator: DataFlow.type.polymorphicDiscriminator, + modelProperties: { + ...DataFlow.type.modelProperties, + sources: { + serializedName: "typeProperties.sources", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowSource" + } + } + } + }, + sinks: { + serializedName: "typeProperties.sinks", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowSink" + } + } + } + }, + transformations: { + serializedName: "typeProperties.transformations", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Transformation" + } + } + } + }, + script: { + serializedName: "typeProperties.script", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugResource", + modelProperties: { + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "DataFlow" + } + } + } + } +}; + 
+export const DatasetDebugResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetDebugResource", + modelProperties: { + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "Dataset" + } + } + } + } +}; + +export const LinkedServiceDebugResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceDebugResource", + modelProperties: { + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "LinkedService" + } + } + } + } +}; + +export const ManagedIntegrationRuntime: coreHttp.CompositeMapper = { + serializedName: "Managed", + type: { + name: "Composite", + className: "ManagedIntegrationRuntime", + uberParent: "IntegrationRuntime", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: IntegrationRuntime.type.polymorphicDiscriminator, + modelProperties: { + ...IntegrationRuntime.type.modelProperties, + state: { + serializedName: "state", + readOnly: true, + type: { + name: "String" + } + }, + computeProperties: { + serializedName: "typeProperties.computeProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeComputeProperties" + } + }, + ssisProperties: { + serializedName: "typeProperties.ssisProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeSsisProperties" + } + } + } + } +}; + +export const SelfHostedIntegrationRuntime: coreHttp.CompositeMapper = { + serializedName: "SelfHosted", + type: { + name: "Composite", + className: "SelfHostedIntegrationRuntime", + uberParent: "IntegrationRuntime", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: IntegrationRuntime.type.polymorphicDiscriminator, + modelProperties: { + ...IntegrationRuntime.type.modelProperties, + linkedInfo: { + serializedName: "typeProperties.linkedInfo", + 
type: { + name: "Composite", + className: "LinkedIntegrationRuntimeType" + } + } + } + } +}; + +export const SecureString: coreHttp.CompositeMapper = { + serializedName: "SecureString", + type: { + name: "Composite", + className: "SecureString", + uberParent: "SecretBase", + polymorphicDiscriminator: SecretBase.type.polymorphicDiscriminator, + modelProperties: { + ...SecretBase.type.modelProperties, + value: { + serializedName: "value", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const AzureKeyVaultSecretReference: coreHttp.CompositeMapper = { + serializedName: "AzureKeyVaultSecret", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference", + uberParent: "SecretBase", + polymorphicDiscriminator: SecretBase.type.polymorphicDiscriminator, + modelProperties: { + ...SecretBase.type.modelProperties, + store: { + serializedName: "store", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + secretName: { + serializedName: "secretName", + required: true, + type: { + name: "any" + } + }, + secretVersion: { + serializedName: "secretVersion", + type: { + name: "any" + } + } + } + } +}; + +export const DataFlowSource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowSource", + modelProperties: { + ...Transformation.type.modelProperties, + dataset: { + serializedName: "dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } +}; + +export const DataFlowSink: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowSink", + modelProperties: { + ...Transformation.type.modelProperties, + dataset: { + serializedName: "dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } +}; + +export const AzureBlobStorageLocation: coreHttp.CompositeMapper = { + serializedName: "AzureBlobStorageLocation", + type: { + name: "Composite", + className: "AzureBlobStorageLocation", + uberParent: 
"DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + container: { + serializedName: "container", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSLocation: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSLocation", + type: { + name: "Composite", + className: "AzureBlobFSLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + fileSystem: { + serializedName: "fileSystem", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreLocation: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreLocation", + type: { + name: "Composite", + className: "AzureDataLakeStoreLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const AmazonS3Location: coreHttp.CompositeMapper = { + serializedName: "AmazonS3Location", + type: { + name: "Composite", + className: "AmazonS3Location", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + bucketName: { + serializedName: "bucketName", + type: { + name: "any" + } + }, + version: { + serializedName: "version", + type: { + name: "any" + } + } + } + } +}; + +export const FileServerLocation: coreHttp.CompositeMapper = { + serializedName: "FileServerLocation", + type: { + name: "Composite", + className: "FileServerLocation", + uberParent: "DatasetLocation", + 
additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const AzureFileStorageLocation: coreHttp.CompositeMapper = { + serializedName: "AzureFileStorageLocation", + type: { + name: "Composite", + className: "AzureFileStorageLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const GoogleCloudStorageLocation: coreHttp.CompositeMapper = { + serializedName: "GoogleCloudStorageLocation", + type: { + name: "Composite", + className: "GoogleCloudStorageLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + bucketName: { + serializedName: "bucketName", + type: { + name: "any" + } + }, + version: { + serializedName: "version", + type: { + name: "any" + } + } + } + } +}; + +export const FtpServerLocation: coreHttp.CompositeMapper = { + serializedName: "FtpServerLocation", + type: { + name: "Composite", + className: "FtpServerLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const SftpLocation: coreHttp.CompositeMapper = { + serializedName: "SftpLocation", + type: { + name: "Composite", + className: "SftpLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties 
+ } + } +}; + +export const HttpServerLocation: coreHttp.CompositeMapper = { + serializedName: "HttpServerLocation", + type: { + name: "Composite", + className: "HttpServerLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + relativeUrl: { + serializedName: "relativeUrl", + type: { + name: "any" + } + } + } + } +}; + +export const HdfsLocation: coreHttp.CompositeMapper = { + serializedName: "HdfsLocation", + type: { + name: "Composite", + className: "HdfsLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const TextFormat: coreHttp.CompositeMapper = { + serializedName: "TextFormat", + type: { + name: "Composite", + className: "TextFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetStorageFormat.type.modelProperties, + columnDelimiter: { + serializedName: "columnDelimiter", + type: { + name: "any" + } + }, + rowDelimiter: { + serializedName: "rowDelimiter", + type: { + name: "any" + } + }, + escapeChar: { + serializedName: "escapeChar", + type: { + name: "any" + } + }, + quoteChar: { + serializedName: "quoteChar", + type: { + name: "any" + } + }, + nullValue: { + serializedName: "nullValue", + type: { + name: "any" + } + }, + encodingName: { + serializedName: "encodingName", + type: { + name: "any" + } + }, + treatEmptyAsNull: { + serializedName: "treatEmptyAsNull", + type: { + name: "any" + } + }, + skipLineCount: { + serializedName: "skipLineCount", + type: { + name: "any" + } + }, + firstRowAsHeader: { + 
serializedName: "firstRowAsHeader", + type: { + name: "any" + } + } + } + } +}; + +export const JsonFormat: coreHttp.CompositeMapper = { + serializedName: "JsonFormat", + type: { + name: "Composite", + className: "JsonFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetStorageFormat.type.modelProperties, + filePattern: { + serializedName: "filePattern", + type: { + name: "String" + } + }, + nestingSeparator: { + serializedName: "nestingSeparator", + type: { + name: "any" + } + }, + encodingName: { + serializedName: "encodingName", + type: { + name: "any" + } + }, + jsonNodeReference: { + serializedName: "jsonNodeReference", + type: { + name: "any" + } + }, + jsonPathDefinition: { + serializedName: "jsonPathDefinition", + type: { + name: "any" + } + } + } + } +}; + +export const AvroFormat: coreHttp.CompositeMapper = { + serializedName: "AvroFormat", + type: { + name: "Composite", + className: "AvroFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetStorageFormat.type.modelProperties + } + } +}; + +export const OrcFormat: coreHttp.CompositeMapper = { + serializedName: "OrcFormat", + type: { + name: "Composite", + className: "OrcFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetStorageFormat.type.modelProperties + } + } +}; + +export const ParquetFormat: coreHttp.CompositeMapper = { + serializedName: "ParquetFormat", + type: { + name: "Composite", + className: "ParquetFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetStorageFormat.type.modelProperties + } + } +}; + +export const DatasetBZip2Compression: coreHttp.CompositeMapper = { + serializedName: "BZip2", + type: { + name: "Composite", + className: "DatasetBZip2Compression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties + } + } +}; + +export const DatasetGZipCompression: coreHttp.CompositeMapper = { + serializedName: "GZip", + type: { + name: "Composite", + className: "DatasetGZipCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties, + level: { + serializedName: "level", + type: { + name: "String" + } + } + } + } +}; + +export const DatasetDeflateCompression: coreHttp.CompositeMapper = { + serializedName: "Deflate", + type: { + name: "Composite", + className: "DatasetDeflateCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties, + level: { + serializedName: "level", + type: { + name: "String" + } + } + } + } +}; + +export const DatasetZipDeflateCompression: coreHttp.CompositeMapper = { + serializedName: "ZipDeflate", + type: { + name: "Composite", + className: "DatasetZipDeflateCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties, + level: { + serializedName: 
"level", + type: { + name: "String" + } + } + } + } +}; + +export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { + serializedName: "Anonymous", + type: { + name: "Composite", + className: "WebAnonymousAuthentication", + uberParent: "WebLinkedServiceTypeProperties", + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + modelProperties: { + ...WebLinkedServiceTypeProperties.type.modelProperties + } + } +}; + +export const WebBasicAuthentication: coreHttp.CompositeMapper = { + serializedName: "Basic", + type: { + name: "Composite", + className: "WebBasicAuthentication", + uberParent: "WebLinkedServiceTypeProperties", + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + modelProperties: { + ...WebLinkedServiceTypeProperties.type.modelProperties, + username: { + serializedName: "username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecretBase" + } + } + } + } +}; + +export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { + serializedName: "ClientCertificate", + type: { + name: "Composite", + className: "WebClientCertificateAuthentication", + uberParent: "WebLinkedServiceTypeProperties", + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + modelProperties: { + ...WebLinkedServiceTypeProperties.type.modelProperties, + pfx: { + serializedName: "pfx", + type: { + name: "Composite", + className: "SecretBase" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecretBase" + } + } + } + } +}; + +export const AzureBlobStorageReadSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobStorageReadSettings", + type: { + name: "Composite", + className: "AzureBlobStorageReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + prefix: { + serializedName: "prefix", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSReadSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSReadSettings", + type: { + name: "Composite", + className: "AzureBlobFSReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreReadSettings: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreReadSettings", + type: { + name: "Composite", + className: 
"AzureDataLakeStoreReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonS3ReadSettings: coreHttp.CompositeMapper = { + serializedName: "AmazonS3ReadSettings", + type: { + name: "Composite", + className: "AmazonS3ReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + prefix: { + serializedName: "prefix", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const FileServerReadSettings: 
coreHttp.CompositeMapper = { + serializedName: "FileServerReadSettings", + type: { + name: "Composite", + className: "FileServerReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const AzureFileStorageReadSettings: coreHttp.CompositeMapper = { + serializedName: "AzureFileStorageReadSettings", + type: { + name: "Composite", + className: "AzureFileStorageReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + 
+export const GoogleCloudStorageReadSettings: coreHttp.CompositeMapper = { + serializedName: "GoogleCloudStorageReadSettings", + type: { + name: "Composite", + className: "GoogleCloudStorageReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + prefix: { + serializedName: "prefix", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const FtpReadSettings: coreHttp.CompositeMapper = { + serializedName: "FtpReadSettings", + type: { + name: "Composite", + className: "FtpReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + useBinaryTransfer: { + serializedName: "useBinaryTransfer", + type: { + name: "Boolean" + } + } + } + } +}; + +export const SftpReadSettings: coreHttp.CompositeMapper = { + serializedName: "SftpReadSettings", + type: { + name: 
"Composite", + className: "SftpReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const HttpReadSettings: coreHttp.CompositeMapper = { + serializedName: "HttpReadSettings", + type: { + name: "Composite", + className: "HttpReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + requestMethod: { + serializedName: "requestMethod", + type: { + name: "any" + } + }, + requestBody: { + serializedName: "requestBody", + type: { + name: "any" + } + }, + additionalHeaders: { + serializedName: "additionalHeaders", + type: { + name: "any" + } + }, + requestTimeout: { + serializedName: "requestTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const HdfsReadSettings: coreHttp.CompositeMapper = { + serializedName: "HdfsReadSettings", + type: { + name: "Composite", + className: "HdfsReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + 
wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + }, + distcpSettings: { + serializedName: "distcpSettings", + type: { + name: "Composite", + className: "DistcpSettings" + } + } + } + } +}; + +export const SftpWriteSettings: coreHttp.CompositeMapper = { + serializedName: "SftpWriteSettings", + type: { + name: "Composite", + className: "SftpWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + operationTimeout: { + serializedName: "operationTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobStorageWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobStorageWriteSettings", + type: { + name: "Composite", + className: "AzureBlobStorageWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + blockSizeInMB: { + serializedName: "blockSizeInMB", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSWriteSettings", + type: { + name: "Composite", + className: "AzureBlobFSWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + blockSizeInMB: { + serializedName: "blockSizeInMB", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreWriteSettings", + type: { + name: "Composite", + className: "AzureDataLakeStoreWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties + } + } +}; + +export const FileServerWriteSettings: coreHttp.CompositeMapper = { + serializedName: "FileServerWriteSettings", + type: { + name: "Composite", + className: "FileServerWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties + } + } +}; + +export const DelimitedTextReadSettings: coreHttp.CompositeMapper = { + serializedName: "DelimitedTextReadSettings", + type: { + name: "Composite", + className: "DelimitedTextReadSettings", + uberParent: "FormatReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatReadSettings.type.modelProperties, + skipLineCount: { + serializedName: "skipLineCount", + type: { + name: "any" + } + } + } + } +}; + +export const AvroWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AvroWriteSettings", + type: { + name: "Composite", + className: "AvroWriteSettings", + uberParent: "FormatWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + 
...FormatWriteSettings.type.modelProperties, + recordName: { + serializedName: "recordName", + type: { + name: "String" + } + }, + recordNamespace: { + serializedName: "recordNamespace", + type: { + name: "String" + } + } + } + } +}; + +export const DelimitedTextWriteSettings: coreHttp.CompositeMapper = { + serializedName: "DelimitedTextWriteSettings", + type: { + name: "Composite", + className: "DelimitedTextWriteSettings", + uberParent: "FormatWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatWriteSettings.type.modelProperties, + quoteAllText: { + serializedName: "quoteAllText", + type: { + name: "any" + } + }, + fileExtension: { + serializedName: "fileExtension", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const JsonWriteSettings: coreHttp.CompositeMapper = { + serializedName: "JsonWriteSettings", + type: { + name: "Composite", + className: "JsonWriteSettings", + uberParent: "FormatWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatWriteSettings.type.modelProperties, + filePattern: { + serializedName: "filePattern", + type: { + name: "String" + } + } + } + } +}; + +export const AvroSource: coreHttp.CompositeMapper = { + serializedName: "AvroSource", + type: { + name: "Composite", + className: "AvroSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + } + } + } +}; + +export const ParquetSource: coreHttp.CompositeMapper = { + serializedName: "ParquetSource", + type: { + name: "Composite", + 
className: "ParquetSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + } + } + } +}; + +export const DelimitedTextSource: coreHttp.CompositeMapper = { + serializedName: "DelimitedTextSource", + type: { + name: "Composite", + className: "DelimitedTextSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "DelimitedTextReadSettings" + } + } + } + } +}; + +export const JsonSource: coreHttp.CompositeMapper = { + serializedName: "JsonSource", + type: { + name: "Composite", + className: "JsonSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + } + } + } +}; + +export const OrcSource: coreHttp.CompositeMapper = { + serializedName: "OrcSource", + type: { + name: "Composite", + className: "OrcSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: 
"StoreReadSettings" + } + } + } + } +}; + +export const BinarySource: coreHttp.CompositeMapper = { + serializedName: "BinarySource", + type: { + name: "Composite", + className: "BinarySource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + } + } + } +}; + +export const TabularSource: coreHttp.CompositeMapper = { + serializedName: "TabularSource", + type: { + name: "Composite", + className: "TabularSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + ...CopySource.type.modelProperties, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const BlobSource: coreHttp.CompositeMapper = { + serializedName: "BlobSource", + type: { + name: "Composite", + className: "BlobSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + treatEmptyAsNull: { + serializedName: "treatEmptyAsNull", + type: { + name: "any" + } + }, + skipHeaderLineCount: { + serializedName: "skipHeaderLineCount", + type: { + name: "any" + } + }, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + } + } + } +}; + +export const DocumentDbCollectionSource: coreHttp.CompositeMapper = { + serializedName: "DocumentDbCollectionSource", + type: { + name: "Composite", + className: "DocumentDbCollectionSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + nestingSeparator: { + serializedName: "nestingSeparator", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbSqlApiSource: coreHttp.CompositeMapper = { + serializedName: "CosmosDbSqlApiSource", + type: { + name: "Composite", + className: "CosmosDbSqlApiSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + pageSize: { + serializedName: "pageSize", + type: { + name: "any" + } + }, + preferredRegions: { + serializedName: "preferredRegions", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsSource: coreHttp.CompositeMapper = { + serializedName: "DynamicsSource", + type: { + name: "Composite", + className: "DynamicsSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsCrmSource: coreHttp.CompositeMapper = { + serializedName: "DynamicsCrmSource", + type: { + name: "Composite", + className: "DynamicsCrmSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const CommonDataServiceForAppsSource: coreHttp.CompositeMapper = { + 
serializedName: "CommonDataServiceForAppsSource", + type: { + name: "Composite", + className: "CommonDataServiceForAppsSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const RelationalSource: coreHttp.CompositeMapper = { + serializedName: "RelationalSource", + type: { + name: "Composite", + className: "RelationalSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MicrosoftAccessSource: coreHttp.CompositeMapper = { + serializedName: "MicrosoftAccessSource", + type: { + name: "Composite", + className: "MicrosoftAccessSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ODataSource: coreHttp.CompositeMapper = { + serializedName: "ODataSource", + type: { + name: "Composite", + className: "ODataSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceServiceCloudSource: coreHttp.CompositeMapper = { + serializedName: "SalesforceServiceCloudSource", + type: { + name: "Composite", + className: "SalesforceServiceCloudSource", + 
uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + readBehavior: { + serializedName: "readBehavior", + type: { + name: "String" + } + } + } + } +}; + +export const RestSource: coreHttp.CompositeMapper = { + serializedName: "RestSource", + type: { + name: "Composite", + className: "RestSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + requestMethod: { + serializedName: "requestMethod", + type: { + name: "any" + } + }, + requestBody: { + serializedName: "requestBody", + type: { + name: "any" + } + }, + additionalHeaders: { + serializedName: "additionalHeaders", + type: { + name: "any" + } + }, + paginationRules: { + serializedName: "paginationRules", + type: { + name: "any" + } + }, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } + }, + requestInterval: { + serializedName: "requestInterval", + type: { + name: "any" + } + } + } + } +}; + +export const FileSystemSource: coreHttp.CompositeMapper = { + serializedName: "FileSystemSource", + type: { + name: "Composite", + className: "FileSystemSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + } + } + } +}; + +export const HdfsSource: coreHttp.CompositeMapper = { + serializedName: "HdfsSource", + type: { + name: "Composite", + className: "HdfsSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + distcpSettings: { + serializedName: "distcpSettings", + type: { + name: "Composite", + className: "DistcpSettings" + } + } + } + } +}; + +export const AzureDataExplorerSource: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorerSource", + type: { + name: "Composite", + className: "AzureDataExplorerSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + required: true, + type: { + name: "any" + } + }, + noTruncation: { + serializedName: "noTruncation", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const OracleSource: coreHttp.CompositeMapper = { + serializedName: "OracleSource", + type: { + name: "Composite", + className: "OracleSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + oracleReaderQuery: { + serializedName: "oracleReaderQuery", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "OraclePartitionSettings" + } + } + } + } +}; + +export const WebSource: coreHttp.CompositeMapper = { + serializedName: "WebSource", + type: { + name: "Composite", + className: "WebSource", + uberParent: "CopySource", + additionalProperties: { 
type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties + } + } +}; + +export const MongoDbSource: coreHttp.CompositeMapper = { + serializedName: "MongoDbSource", + type: { + name: "Composite", + className: "MongoDbSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbV2Source: coreHttp.CompositeMapper = { + serializedName: "MongoDbV2Source", + type: { + name: "Composite", + className: "MongoDbV2Source", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + filter: { + serializedName: "filter", + type: { + name: "any" + } + }, + cursorMethods: { + serializedName: "cursorMethods", + type: { + name: "Composite", + className: "MongoDbCursorMethodsProperties" + } + }, + batchSize: { + serializedName: "batchSize", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiSource: coreHttp.CompositeMapper = { + serializedName: "CosmosDbMongoDbApiSource", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + filter: { + serializedName: "filter", + type: { + name: "any" + } + }, + cursorMethods: { + serializedName: "cursorMethods", + type: { + name: "Composite", + className: "MongoDbCursorMethodsProperties" + } + }, + 
batchSize: { + serializedName: "batchSize", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const Office365Source: coreHttp.CompositeMapper = { + serializedName: "Office365Source", + type: { + name: "Composite", + className: "Office365Source", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + allowedGroups: { + serializedName: "allowedGroups", + type: { + name: "any" + } + }, + userScopeFilterUri: { + serializedName: "userScopeFilterUri", + type: { + name: "any" + } + }, + dateFilterColumn: { + serializedName: "dateFilterColumn", + type: { + name: "any" + } + }, + startTime: { + serializedName: "startTime", + type: { + name: "any" + } + }, + endTime: { + serializedName: "endTime", + type: { + name: "any" + } + }, + outputColumns: { + serializedName: "outputColumns", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreSource: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreSource", + type: { + name: "Composite", + className: "AzureDataLakeStoreSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSSource: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSSource", + type: { + name: "Composite", + className: "AzureBlobFSSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + treatEmptyAsNull: { + serializedName: 
"treatEmptyAsNull", + type: { + name: "any" + } + }, + skipHeaderLineCount: { + serializedName: "skipHeaderLineCount", + type: { + name: "any" + } + }, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + } + } + } +}; + +export const HttpSource: coreHttp.CompositeMapper = { + serializedName: "HttpSource", + type: { + name: "Composite", + className: "HttpSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const DelimitedTextSink: coreHttp.CompositeMapper = { + serializedName: "DelimitedTextSink", + type: { + name: "Composite", + className: "DelimitedTextSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "DelimitedTextWriteSettings" + } + } + } + } +}; + +export const JsonSink: coreHttp.CompositeMapper = { + serializedName: "JsonSink", + type: { + name: "Composite", + className: "JsonSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "JsonWriteSettings" + } + } + } + } +}; + +export const OrcSink: 
coreHttp.CompositeMapper = { + serializedName: "OrcSink", + type: { + name: "Composite", + className: "OrcSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + } + } + } +}; + +export const AzurePostgreSqlSink: coreHttp.CompositeMapper = { + serializedName: "AzurePostgreSqlSink", + type: { + name: "Composite", + className: "AzurePostgreSqlSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMySqlSink: coreHttp.CompositeMapper = { + serializedName: "AzureMySqlSink", + type: { + name: "Composite", + className: "AzureMySqlSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const SapCloudForCustomerSink: coreHttp.CompositeMapper = { + serializedName: "SapCloudForCustomerSink", + type: { + name: "Composite", + className: "SapCloudForCustomerSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + } + } + } +}; + +export const AzureQueueSink: coreHttp.CompositeMapper = { + serializedName: "AzureQueueSink", + type: { + 
name: "Composite", + className: "AzureQueueSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties + } + } +}; + +export const AzureTableSink: coreHttp.CompositeMapper = { + serializedName: "AzureTableSink", + type: { + name: "Composite", + className: "AzureTableSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + azureTableDefaultPartitionKeyValue: { + serializedName: "azureTableDefaultPartitionKeyValue", + type: { + name: "any" + } + }, + azureTablePartitionKeyName: { + serializedName: "azureTablePartitionKeyName", + type: { + name: "any" + } + }, + azureTableRowKeyName: { + serializedName: "azureTableRowKeyName", + type: { + name: "any" + } + }, + azureTableInsertType: { + serializedName: "azureTableInsertType", + type: { + name: "any" + } + } + } + } +}; + +export const AvroSink: coreHttp.CompositeMapper = { + serializedName: "AvroSink", + type: { + name: "Composite", + className: "AvroSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "AvroWriteSettings" + } + } + } + } +}; + +export const ParquetSink: coreHttp.CompositeMapper = { + serializedName: "ParquetSink", + type: { + name: "Composite", + className: "ParquetSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + 
modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + } + } + } +}; + +export const BinarySink: coreHttp.CompositeMapper = { + serializedName: "BinarySink", + type: { + name: "Composite", + className: "BinarySink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + } + } + } +}; + +export const BlobSink: coreHttp.CompositeMapper = { + serializedName: "BlobSink", + type: { + name: "Composite", + className: "BlobSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + blobWriterOverwriteFiles: { + serializedName: "blobWriterOverwriteFiles", + type: { + name: "any" + } + }, + blobWriterDateTimeFormat: { + serializedName: "blobWriterDateTimeFormat", + type: { + name: "any" + } + }, + blobWriterAddHeader: { + serializedName: "blobWriterAddHeader", + type: { + name: "any" + } + }, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const FileSystemSink: coreHttp.CompositeMapper = { + serializedName: "FileSystemSink", + type: { + name: "Composite", + className: "FileSystemSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const DocumentDbCollectionSink: coreHttp.CompositeMapper = { + serializedName: 
"DocumentDbCollectionSink", + type: { + name: "Composite", + className: "DocumentDbCollectionSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + nestingSeparator: { + serializedName: "nestingSeparator", + type: { + name: "any" + } + }, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbSqlApiSink: coreHttp.CompositeMapper = { + serializedName: "CosmosDbSqlApiSink", + type: { + name: "Composite", + className: "CosmosDbSqlApiSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const SqlSink: coreHttp.CompositeMapper = { + serializedName: "SqlSink", + type: { + name: "Composite", + className: "SqlSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + sqlWriterStoredProcedureName: { + serializedName: "sqlWriterStoredProcedureName", + type: { + name: "any" + } + }, + sqlWriterTableType: { + serializedName: "sqlWriterTableType", + type: { + name: "any" + } + }, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + storedProcedureTableTypeParameterName: { + serializedName: "storedProcedureTableTypeParameterName", + type: { + name: "any" + } + }, + tableOption: { + serializedName: 
"tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const SqlServerSink: coreHttp.CompositeMapper = { + serializedName: "SqlServerSink", + type: { + name: "Composite", + className: "SqlServerSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + sqlWriterStoredProcedureName: { + serializedName: "sqlWriterStoredProcedureName", + type: { + name: "any" + } + }, + sqlWriterTableType: { + serializedName: "sqlWriterTableType", + type: { + name: "any" + } + }, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + storedProcedureTableTypeParameterName: { + serializedName: "storedProcedureTableTypeParameterName", + type: { + name: "any" + } + }, + tableOption: { + serializedName: "tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlSink: coreHttp.CompositeMapper = { + serializedName: "AzureSqlSink", + type: { + name: "Composite", + className: "AzureSqlSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + sqlWriterStoredProcedureName: { + serializedName: "sqlWriterStoredProcedureName", + type: { + name: "any" + } + }, + sqlWriterTableType: { + serializedName: "sqlWriterTableType", + type: { + name: "any" + } + }, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" 
} + } + } + }, + storedProcedureTableTypeParameterName: { + serializedName: "storedProcedureTableTypeParameterName", + type: { + name: "any" + } + }, + tableOption: { + serializedName: "tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const SqlMISink: coreHttp.CompositeMapper = { + serializedName: "SqlMISink", + type: { + name: "Composite", + className: "SqlMISink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + sqlWriterStoredProcedureName: { + serializedName: "sqlWriterStoredProcedureName", + type: { + name: "any" + } + }, + sqlWriterTableType: { + serializedName: "sqlWriterTableType", + type: { + name: "any" + } + }, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + storedProcedureTableTypeParameterName: { + serializedName: "storedProcedureTableTypeParameterName", + type: { + name: "any" + } + }, + tableOption: { + serializedName: "tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const SqlDWSink: coreHttp.CompositeMapper = { + serializedName: "SqlDWSink", + type: { + name: "Composite", + className: "SqlDWSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + allowPolyBase: { + serializedName: "allowPolyBase", + type: { + name: "any" + } + }, + polyBaseSettings: { + serializedName: "polyBaseSettings", + type: { + name: "Composite", + className: "PolybaseSettings" + } + }, + 
allowCopyCommand: { + serializedName: "allowCopyCommand", + type: { + name: "any" + } + }, + copyCommandSettings: { + serializedName: "copyCommandSettings", + type: { + name: "Composite", + className: "DWCopyCommandSettings" + } + }, + tableOption: { + serializedName: "tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const OracleSink: coreHttp.CompositeMapper = { + serializedName: "OracleSink", + type: { + name: "Composite", + className: "OracleSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreSink: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreSink", + type: { + name: "Composite", + className: "AzureDataLakeStoreSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + }, + enableAdlsSingleFileParallel: { + serializedName: "enableAdlsSingleFileParallel", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSSink: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSSink", + type: { + name: "Composite", + className: "AzureBlobFSSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSearchIndexSink: coreHttp.CompositeMapper = { + serializedName: "AzureSearchIndexSink", + type: { + name: "Composite", + className: 
"AzureSearchIndexSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + } + } + } +}; + +export const OdbcSink: coreHttp.CompositeMapper = { + serializedName: "OdbcSink", + type: { + name: "Composite", + className: "OdbcSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const InformixSink: coreHttp.CompositeMapper = { + serializedName: "InformixSink", + type: { + name: "Composite", + className: "InformixSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const MicrosoftAccessSink: coreHttp.CompositeMapper = { + serializedName: "MicrosoftAccessSink", + type: { + name: "Composite", + className: "MicrosoftAccessSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsSink: coreHttp.CompositeMapper = { + serializedName: "DynamicsSink", + type: { + name: "Composite", + className: "DynamicsSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + required: true, + type: { + name: "String" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + }, + alternateKeyName: { + serializedName: "alternateKeyName", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsCrmSink: coreHttp.CompositeMapper = { + serializedName: "DynamicsCrmSink", + type: { + name: "Composite", + className: "DynamicsCrmSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + required: true, + type: { + name: "String" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + }, + alternateKeyName: { + serializedName: "alternateKeyName", + type: { + name: "any" + } + } + } + } +}; + +export const CommonDataServiceForAppsSink: coreHttp.CompositeMapper = { + serializedName: "CommonDataServiceForAppsSink", + type: { + name: "Composite", + className: "CommonDataServiceForAppsSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + required: true, + type: { + name: "String" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + }, + alternateKeyName: { + serializedName: "alternateKeyName", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataExplorerSink: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorerSink", + type: { + name: "Composite", + className: "AzureDataExplorerSink", + uberParent: "CopySink", + 
additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + ingestionMappingName: { + serializedName: "ingestionMappingName", + type: { + name: "any" + } + }, + ingestionMappingAsJson: { + serializedName: "ingestionMappingAsJson", + type: { + name: "any" + } + }, + flushImmediately: { + serializedName: "flushImmediately", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceSink: coreHttp.CompositeMapper = { + serializedName: "SalesforceSink", + type: { + name: "Composite", + className: "SalesforceSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + }, + externalIdFieldName: { + serializedName: "externalIdFieldName", + type: { + name: "any" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceServiceCloudSink: coreHttp.CompositeMapper = { + serializedName: "SalesforceServiceCloudSink", + type: { + name: "Composite", + className: "SalesforceServiceCloudSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + }, + externalIdFieldName: { + serializedName: "externalIdFieldName", + type: { + name: "any" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiSink: coreHttp.CompositeMapper = { + serializedName: "CosmosDbMongoDbApiSink", + type: { + name: "Composite", + className: 
"CosmosDbMongoDbApiSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const TabularTranslator: coreHttp.CompositeMapper = { + serializedName: "TabularTranslator", + type: { + name: "Composite", + className: "TabularTranslator", + uberParent: "CopyTranslator", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopyTranslator.type.polymorphicDiscriminator, + modelProperties: { + ...CopyTranslator.type.modelProperties, + columnMappings: { + serializedName: "columnMappings", + type: { + name: "any" + } + }, + schemaMapping: { + serializedName: "schemaMapping", + type: { + name: "any" + } + }, + collectionReference: { + serializedName: "collectionReference", + type: { + name: "any" + } + }, + mapComplexValuesToString: { + serializedName: "mapComplexValuesToString", + type: { + name: "any" + } + }, + mappings: { + serializedName: "mappings", + type: { + name: "any" + } + } + } + } +}; + +export const TriggerDependencyReference: coreHttp.CompositeMapper = { + serializedName: "TriggerDependencyReference", + type: { + name: "Composite", + className: "TriggerDependencyReference", + uberParent: "DependencyReference", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + ...DependencyReference.type.modelProperties, + referenceTrigger: { + serializedName: "referenceTrigger", + type: { + name: "Composite", + className: "TriggerReference" + } + } + } + } +}; + +export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMapper = { + serializedName: "SelfDependencyTumblingWindowTriggerReference", + type: { + name: "Composite", + className: "SelfDependencyTumblingWindowTriggerReference", + uberParent: 
"DependencyReference", + polymorphicDiscriminator: DependencyReference.type.polymorphicDiscriminator, + modelProperties: { + ...DependencyReference.type.modelProperties, + offset: { + constraints: { + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), + MaxLength: 15, + MinLength: 8 + }, + serializedName: "offset", + required: true, + type: { + name: "String" + } + }, + size: { + constraints: { + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), + MaxLength: 15, + MinLength: 8 + }, + serializedName: "size", + type: { + name: "String" + } + } + } + } +}; + +export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper = { + serializedName: "Key", + type: { + name: "Composite", + className: "LinkedIntegrationRuntimeKeyAuthorization", + uberParent: "LinkedIntegrationRuntimeType", + polymorphicDiscriminator: + LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedIntegrationRuntimeType.type.modelProperties, + key: { + serializedName: "key", + type: { + name: "Composite", + className: "SecureString" + } + } + } + } +}; + +export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper = { + serializedName: "RBAC", + type: { + name: "Composite", + className: "LinkedIntegrationRuntimeRbacAuthorization", + uberParent: "LinkedIntegrationRuntimeType", + polymorphicDiscriminator: + LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedIntegrationRuntimeType.type.modelProperties, + resourceId: { + serializedName: "resourceId", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const LinkedServiceResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "LinkedService" + } + } + } + 
} +}; + +export const DatasetResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "Dataset" + } + } + } + } +}; + +export const PipelineResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineResource", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + ...AzureEntityResource.type.modelProperties, + description: { + serializedName: "properties.description", + type: { + name: "String" + } + }, + activities: { + serializedName: "properties.activities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + }, + parameters: { + serializedName: "properties.parameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "ParameterSpecification" } + } + } + }, + variables: { + serializedName: "properties.variables", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "VariableSpecification" } + } + } + }, + concurrency: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "properties.concurrency", + type: { + name: "Number" + } + }, + annotations: { + serializedName: "properties.annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + runDimensions: { + serializedName: "properties.runDimensions", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + folder: { + serializedName: "properties.folder", + type: { + name: "Composite", + className: "PipelineFolder" + } + } + } + } +}; + +export const TriggerResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: 
"properties", + type: { + name: "Composite", + className: "Trigger" + } + } + } + } +}; + +export const DataFlowResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "DataFlow" + } + } + } + } +}; + +export const SparkJobDefinitionResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobDefinitionResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "SparkJobDefinition" + } + } + } + } +}; + +export const IntegrationRuntimeResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "IntegrationRuntime" + } + } + } + } +}; + +export const SubResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SubResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties + } + } +}; + +export const RerunTriggerResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RerunTriggerResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "RerunTumblingWindowTrigger" + } + } + } + } +}; + +export const Workspace: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Workspace", + modelProperties: { + ...TrackedResource.type.modelProperties, + identity: { + serializedName: "identity", + type: { + name: "Composite", + className: "ManagedIdentity" + } + }, + defaultDataLakeStorage: { + serializedName: 
"properties.defaultDataLakeStorage", + type: { + name: "Composite", + className: "DataLakeStorageAccountDetails" + } + }, + sqlAdministratorLoginPassword: { + serializedName: "properties.sqlAdministratorLoginPassword", + type: { + name: "String" + } + }, + managedResourceGroupName: { + serializedName: "properties.managedResourceGroupName", + type: { + name: "String" + } + }, + provisioningState: { + serializedName: "properties.provisioningState", + readOnly: true, + type: { + name: "String" + } + }, + sqlAdministratorLogin: { + serializedName: "properties.sqlAdministratorLogin", + type: { + name: "String" + } + }, + virtualNetworkProfile: { + serializedName: "properties.virtualNetworkProfile", + type: { + name: "Composite", + className: "VirtualNetworkProfile" + } + }, + connectivityEndpoints: { + serializedName: "properties.connectivityEndpoints", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + managedVirtualNetwork: { + serializedName: "properties.managedVirtualNetwork", + type: { + name: "String" + } + }, + privateEndpointConnections: { + serializedName: "properties.privateEndpointConnections", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PrivateEndpointConnection" + } + } + } + }, + encryption: { + serializedName: "properties.encryption", + type: { + name: "Composite", + className: "EncryptionDetails" + } + }, + workspaceUID: { + serializedName: "properties.workspaceUID", + readOnly: true, + type: { + name: "Uuid" + } + }, + extraProperties: { + serializedName: "properties.extraProperties", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + managedVirtualNetworkSettings: { + serializedName: "properties.managedVirtualNetworkSettings", + type: { + name: "Composite", + className: "ManagedVirtualNetworkSettings" + } + }, + workspaceRepositoryConfiguration: { + serializedName: "properties.workspaceRepositoryConfiguration", + type: { + name: 
"Composite", + className: "WorkspaceRepositoryConfiguration" + } + }, + purviewConfiguration: { + serializedName: "properties.purviewConfiguration", + type: { + name: "Composite", + className: "PurviewConfiguration" + } + } + } + } +}; + +export const SqlPool: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlPool", + modelProperties: { + ...TrackedResource.type.modelProperties, + sku: { + serializedName: "sku", + type: { + name: "Composite", + className: "Sku" + } + }, + maxSizeBytes: { + serializedName: "properties.maxSizeBytes", + type: { + name: "Number" + } + }, + collation: { + serializedName: "properties.collation", + type: { + name: "String" + } + }, + sourceDatabaseId: { + serializedName: "properties.sourceDatabaseId", + type: { + name: "String" + } + }, + recoverableDatabaseId: { + serializedName: "properties.recoverableDatabaseId", + type: { + name: "String" + } + }, + provisioningState: { + serializedName: "properties.provisioningState", + type: { + name: "String" + } + }, + status: { + serializedName: "properties.status", + type: { + name: "String" + } + }, + restorePointInTime: { + serializedName: "properties.restorePointInTime", + type: { + name: "String" + } + }, + createMode: { + serializedName: "properties.createMode", + type: { + name: "String" + } + }, + creationDate: { + serializedName: "properties.creationDate", + type: { + name: "DateTime" + } + } + } + } +}; + +export const BigDataPoolResourceInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "BigDataPoolResourceInfo", + modelProperties: { + ...TrackedResource.type.modelProperties, + provisioningState: { + serializedName: "properties.provisioningState", + type: { + name: "String" + } + }, + autoScale: { + serializedName: "properties.autoScale", + type: { + name: "Composite", + className: "AutoScaleProperties" + } + }, + creationDate: { + serializedName: "properties.creationDate", + type: { + name: "DateTime" + } + }, + autoPause: { + 
serializedName: "properties.autoPause", + type: { + name: "Composite", + className: "AutoPauseProperties" + } + }, + isComputeIsolationEnabled: { + serializedName: "properties.isComputeIsolationEnabled", + type: { + name: "Boolean" + } + }, + haveLibraryRequirementsChanged: { + serializedName: "properties.haveLibraryRequirementsChanged", + type: { + name: "Boolean" + } + }, + sessionLevelPackagesEnabled: { + serializedName: "properties.sessionLevelPackagesEnabled", + type: { + name: "Boolean" + } + }, + sparkEventsFolder: { + serializedName: "properties.sparkEventsFolder", + type: { + name: "String" + } + }, + nodeCount: { + serializedName: "properties.nodeCount", + type: { + name: "Number" + } + }, + libraryRequirements: { + serializedName: "properties.libraryRequirements", + type: { + name: "Composite", + className: "LibraryRequirements" + } + }, + sparkConfigProperties: { + serializedName: "properties.sparkConfigProperties", + type: { + name: "Composite", + className: "LibraryRequirements" + } + }, + sparkVersion: { + serializedName: "properties.sparkVersion", + type: { + name: "String" + } + }, + defaultSparkLogFolder: { + serializedName: "properties.defaultSparkLogFolder", + type: { + name: "String" + } + }, + nodeSize: { + serializedName: "properties.nodeSize", + type: { + name: "String" + } + }, + nodeSizeFamily: { + serializedName: "properties.nodeSizeFamily", + type: { + name: "String" + } + } + } + } +}; + +export const CopyActivity: coreHttp.CompositeMapper = { + serializedName: "Copy", + type: { + name: "Composite", + className: "CopyActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + inputs: { + serializedName: "inputs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + }, + outputs: { + serializedName: 
"outputs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + }, + source: { + serializedName: "typeProperties.source", + type: { + name: "Composite", + className: "CopySource" + } + }, + sink: { + serializedName: "typeProperties.sink", + type: { + name: "Composite", + className: "CopySink" + } + }, + translator: { + serializedName: "typeProperties.translator", + type: { + name: "any" + } + }, + enableStaging: { + serializedName: "typeProperties.enableStaging", + type: { + name: "any" + } + }, + stagingSettings: { + serializedName: "typeProperties.stagingSettings", + type: { + name: "Composite", + className: "StagingSettings" + } + }, + parallelCopies: { + serializedName: "typeProperties.parallelCopies", + type: { + name: "any" + } + }, + dataIntegrationUnits: { + serializedName: "typeProperties.dataIntegrationUnits", + type: { + name: "any" + } + }, + enableSkipIncompatibleRow: { + serializedName: "typeProperties.enableSkipIncompatibleRow", + type: { + name: "any" + } + }, + redirectIncompatibleRowSettings: { + serializedName: "typeProperties.redirectIncompatibleRowSettings", + type: { + name: "Composite", + className: "RedirectIncompatibleRowSettings" + } + }, + preserveRules: { + serializedName: "typeProperties.preserveRules", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + preserve: { + serializedName: "typeProperties.preserve", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + } + } + } +}; + +export const HDInsightHiveActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightHive", + type: { + name: "Composite", + className: "HDInsightHiveActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storageLinkedServices: { + serializedName: 
"typeProperties.storageLinkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + scriptPath: { + serializedName: "typeProperties.scriptPath", + type: { + name: "any" + } + }, + scriptLinkedService: { + serializedName: "typeProperties.scriptLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + defines: { + serializedName: "typeProperties.defines", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + variables: { + serializedName: "typeProperties.variables", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + queryTimeout: { + serializedName: "typeProperties.queryTimeout", + type: { + name: "Number" + } + } + } + } +}; + +export const HDInsightPigActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightPig", + type: { + name: "Composite", + className: "HDInsightPigActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storageLinkedServices: { + serializedName: "typeProperties.storageLinkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "any" + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + scriptPath: { + serializedName: "typeProperties.scriptPath", + type: { + name: "any" + } + }, + scriptLinkedService: { + serializedName: 
"typeProperties.scriptLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + defines: { + serializedName: "typeProperties.defines", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const HDInsightMapReduceActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightMapReduce", + type: { + name: "Composite", + className: "HDInsightMapReduceActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storageLinkedServices: { + serializedName: "typeProperties.storageLinkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + className: { + serializedName: "typeProperties.className", + required: true, + type: { + name: "any" + } + }, + jarFilePath: { + serializedName: "typeProperties.jarFilePath", + required: true, + type: { + name: "any" + } + }, + jarLinkedService: { + serializedName: "typeProperties.jarLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + jarLibs: { + serializedName: "typeProperties.jarLibs", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + defines: { + serializedName: "typeProperties.defines", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const HDInsightStreamingActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightStreaming", + type: { + name: "Composite", + className: "HDInsightStreamingActivity", + uberParent: 
"Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storageLinkedServices: { + serializedName: "typeProperties.storageLinkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + mapper: { + serializedName: "typeProperties.mapper", + required: true, + type: { + name: "any" + } + }, + reducer: { + serializedName: "typeProperties.reducer", + required: true, + type: { + name: "any" + } + }, + input: { + serializedName: "typeProperties.input", + required: true, + type: { + name: "any" + } + }, + output: { + serializedName: "typeProperties.output", + required: true, + type: { + name: "any" + } + }, + filePaths: { + serializedName: "typeProperties.filePaths", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + fileLinkedService: { + serializedName: "typeProperties.fileLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + combiner: { + serializedName: "typeProperties.combiner", + type: { + name: "any" + } + }, + commandEnvironment: { + serializedName: "typeProperties.commandEnvironment", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + defines: { + serializedName: "typeProperties.defines", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const HDInsightSparkActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightSpark", + type: { + name: "Composite", + className: "HDInsightSparkActivity", + uberParent: 
"Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + rootPath: { + serializedName: "typeProperties.rootPath", + required: true, + type: { + name: "any" + } + }, + entryFilePath: { + serializedName: "typeProperties.entryFilePath", + required: true, + type: { + name: "any" + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + sparkJobLinkedService: { + serializedName: "typeProperties.sparkJobLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + className: { + serializedName: "typeProperties.className", + type: { + name: "String" + } + }, + proxyUser: { + serializedName: "typeProperties.proxyUser", + type: { + name: "any" + } + }, + sparkConfig: { + serializedName: "typeProperties.sparkConfig", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const ExecuteSsisPackageActivity: coreHttp.CompositeMapper = { + serializedName: "ExecuteSSISPackage", + type: { + name: "Composite", + className: "ExecuteSsisPackageActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + packageLocation: { + serializedName: "typeProperties.packageLocation", + type: { + name: "Composite", + className: "SsisPackageLocation" + } + }, + runtime: { + serializedName: "typeProperties.runtime", + type: { + name: "any" + } + }, + loggingLevel: { + serializedName: "typeProperties.loggingLevel", + type: { + name: "any" + } + }, + environmentPath: { + serializedName: "typeProperties.environmentPath", + 
type: { + name: "any" + } + }, + executionCredential: { + serializedName: "typeProperties.executionCredential", + type: { + name: "Composite", + className: "SsisExecutionCredential" + } + }, + connectVia: { + serializedName: "typeProperties.connectVia", + type: { + name: "Composite", + className: "IntegrationRuntimeReference" + } + }, + projectParameters: { + serializedName: "typeProperties.projectParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "SsisExecutionParameter" } + } + } + }, + packageParameters: { + serializedName: "typeProperties.packageParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "SsisExecutionParameter" } + } + } + }, + projectConnectionManagers: { + serializedName: "typeProperties.projectConnectionManagers", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + packageConnectionManagers: { + serializedName: "typeProperties.packageConnectionManagers", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + propertyOverrides: { + serializedName: "typeProperties.propertyOverrides", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "SsisPropertyOverride" } + } + } + }, + logLocation: { + serializedName: "typeProperties.logLocation", + type: { + name: "Composite", + className: "SsisLogLocation" + } + } + } + } +}; + +export const CustomActivity: coreHttp.CompositeMapper = { + serializedName: "Custom", + type: { + name: "Composite", + className: "CustomActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + command: { + serializedName: "typeProperties.command", + required: true, + type: { + name: "any" + } + }, + resourceLinkedService: { + serializedName: "typeProperties.resourceLinkedService", + type: { + name: 
"Composite", + className: "LinkedServiceReference" + } + }, + folderPath: { + serializedName: "typeProperties.folderPath", + type: { + name: "any" + } + }, + referenceObjects: { + serializedName: "typeProperties.referenceObjects", + type: { + name: "Composite", + className: "CustomActivityReferenceObject" + } + }, + extendedProperties: { + serializedName: "typeProperties.extendedProperties", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + retentionTimeInDays: { + serializedName: "typeProperties.retentionTimeInDays", + type: { + name: "any" + } + } + } + } +}; + +export const SqlServerStoredProcedureActivity: coreHttp.CompositeMapper = { + serializedName: "SqlServerStoredProcedure", + type: { + name: "Composite", + className: "SqlServerStoredProcedureActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storedProcedureName: { + serializedName: "typeProperties.storedProcedureName", + required: true, + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "typeProperties.storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + } + } + } +}; + +export const DeleteActivity: coreHttp.CompositeMapper = { + serializedName: "Delete", + type: { + name: "Composite", + className: "DeleteActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + recursive: { + serializedName: "typeProperties.recursive", + type: { + name: "any" + } + }, + maxConcurrentConnections: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.maxConcurrentConnections", + type: { + name: "Number" + 
} + }, + enableLogging: { + serializedName: "typeProperties.enableLogging", + type: { + name: "any" + } + }, + logStorageSettings: { + serializedName: "typeProperties.logStorageSettings", + type: { + name: "Composite", + className: "LogStorageSettings" + } + }, + dataset: { + serializedName: "typeProperties.dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } +}; + +export const AzureDataExplorerCommandActivity: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorerCommand", + type: { + name: "Composite", + className: "AzureDataExplorerCommandActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + command: { + serializedName: "typeProperties.command", + required: true, + type: { + name: "any" + } + }, + commandTimeout: { + serializedName: "typeProperties.commandTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const LookupActivity: coreHttp.CompositeMapper = { + serializedName: "Lookup", + type: { + name: "Composite", + className: "LookupActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + source: { + serializedName: "typeProperties.source", + type: { + name: "Composite", + className: "CopySource" + } + }, + dataset: { + serializedName: "typeProperties.dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + }, + firstRowOnly: { + serializedName: "typeProperties.firstRowOnly", + type: { + name: "any" + } + } + } + } +}; + +export const WebActivity: coreHttp.CompositeMapper = { + serializedName: "WebActivity", + type: { + name: "Composite", + className: "WebActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, 
+ polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + method: { + serializedName: "typeProperties.method", + required: true, + type: { + name: "String" + } + }, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + headers: { + serializedName: "typeProperties.headers", + type: { + name: "any" + } + }, + body: { + serializedName: "typeProperties.body", + type: { + name: "any" + } + }, + authentication: { + serializedName: "typeProperties.authentication", + type: { + name: "Composite", + className: "WebActivityAuthentication" + } + }, + datasets: { + serializedName: "typeProperties.datasets", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + }, + linkedServices: { + serializedName: "typeProperties.linkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + connectVia: { + serializedName: "typeProperties.connectVia", + type: { + name: "Composite", + className: "IntegrationRuntimeReference" + } + } + } + } +}; + +export const GetMetadataActivity: coreHttp.CompositeMapper = { + serializedName: "GetMetadata", + type: { + name: "Composite", + className: "GetMetadataActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + dataset: { + serializedName: "typeProperties.dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + }, + fieldList: { + serializedName: "typeProperties.fieldList", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + } + } + } +}; + +export const AzureMLBatchExecutionActivity: coreHttp.CompositeMapper = { + serializedName: "AzureMLBatchExecution", + type: 
{ + name: "Composite", + className: "AzureMLBatchExecutionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + globalParameters: { + serializedName: "typeProperties.globalParameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + webServiceOutputs: { + serializedName: "typeProperties.webServiceOutputs", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "AzureMLWebServiceFile" } + } + } + }, + webServiceInputs: { + serializedName: "typeProperties.webServiceInputs", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "AzureMLWebServiceFile" } + } + } + } + } + } +}; + +export const AzureMLUpdateResourceActivity: coreHttp.CompositeMapper = { + serializedName: "AzureMLUpdateResource", + type: { + name: "Composite", + className: "AzureMLUpdateResourceActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + trainedModelName: { + serializedName: "typeProperties.trainedModelName", + required: true, + type: { + name: "any" + } + }, + trainedModelLinkedServiceName: { + serializedName: "typeProperties.trainedModelLinkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + trainedModelFilePath: { + serializedName: "typeProperties.trainedModelFilePath", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureMLExecutePipelineActivity: coreHttp.CompositeMapper = { + serializedName: "AzureMLExecutePipeline", + type: { + name: "Composite", + className: "AzureMLExecutePipelineActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + mlPipelineId: { + serializedName: "typeProperties.mlPipelineId", + required: true, + type: { + name: "any" + } + }, + experimentName: { + serializedName: "typeProperties.experimentName", + type: { + name: "any" + } + }, + mlPipelineParameters: { + serializedName: "typeProperties.mlPipelineParameters", + type: { + name: "any" + } + }, + mlParentRunId: { + serializedName: "typeProperties.mlParentRunId", + type: { + name: "any" + } + }, + continueOnStepFailure: { + serializedName: "typeProperties.continueOnStepFailure", + type: { + name: "any" + } + } + } + } +}; + +export const DataLakeAnalyticsUsqlActivity: coreHttp.CompositeMapper = { + serializedName: "DataLakeAnalyticsU-SQL", + type: { + name: "Composite", + className: "DataLakeAnalyticsUsqlActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + scriptPath: { + serializedName: "typeProperties.scriptPath", + required: true, + type: { + name: "any" + } + }, + scriptLinkedService: { + serializedName: "typeProperties.scriptLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + degreeOfParallelism: { + serializedName: "typeProperties.degreeOfParallelism", + type: { + name: "any" + } + }, + priority: { + serializedName: "typeProperties.priority", + type: { + name: "any" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + runtimeVersion: { + serializedName: "typeProperties.runtimeVersion", + type: { + name: "any" + } + }, + compilationMode: { + serializedName: "typeProperties.compilationMode", + type: { + name: "any" + } + } + } + } +}; + +export const DatabricksNotebookActivity: 
coreHttp.CompositeMapper = { + serializedName: "DatabricksNotebook", + type: { + name: "Composite", + className: "DatabricksNotebookActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + notebookPath: { + serializedName: "typeProperties.notebookPath", + required: true, + type: { + name: "any" + } + }, + baseParameters: { + serializedName: "typeProperties.baseParameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + libraries: { + serializedName: "typeProperties.libraries", + type: { + name: "Sequence", + element: { + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } + } + } +}; + +export const DatabricksSparkJarActivity: coreHttp.CompositeMapper = { + serializedName: "DatabricksSparkJar", + type: { + name: "Composite", + className: "DatabricksSparkJarActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + mainClassName: { + serializedName: "typeProperties.mainClassName", + required: true, + type: { + name: "any" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + libraries: { + serializedName: "typeProperties.libraries", + type: { + name: "Sequence", + element: { + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } + } + } +}; + +export const DatabricksSparkPythonActivity: coreHttp.CompositeMapper = { + serializedName: "DatabricksSparkPython", + type: { + name: "Composite", + className: "DatabricksSparkPythonActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + pythonFile: { + serializedName: "typeProperties.pythonFile", + required: true, + type: { + name: "any" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + libraries: { + serializedName: "typeProperties.libraries", + type: { + name: "Sequence", + element: { + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } + } + } +}; + +export const AzureFunctionActivity: coreHttp.CompositeMapper = { + serializedName: "AzureFunctionActivity", + type: { + name: "Composite", + className: "AzureFunctionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + method: { + serializedName: "typeProperties.method", + required: true, + type: { + name: "String" + } + }, + functionName: { + serializedName: "typeProperties.functionName", + required: true, + type: { + name: "any" + } + }, + headers: { + serializedName: "typeProperties.headers", + type: { + name: "any" + } + }, + body: { + serializedName: "typeProperties.body", + type: { + name: "any" + } + } + } + } +}; + +export const ExecuteDataFlowActivity: coreHttp.CompositeMapper = { + serializedName: "ExecuteDataFlow", + type: { + name: "Composite", + className: "ExecuteDataFlowActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + dataFlow: { + serializedName: "typeProperties.dataFlow", + type: { + name: "Composite", + className: "DataFlowReference" + } + }, + staging: { + serializedName: "typeProperties.staging", + type: { + name: "Composite", + className: 
"DataFlowStagingInfo" + } + }, + integrationRuntime: { + serializedName: "typeProperties.integrationRuntime", + type: { + name: "Composite", + className: "IntegrationRuntimeReference" + } + }, + compute: { + serializedName: "typeProperties.compute", + type: { + name: "Composite", + className: "ExecuteDataFlowActivityTypePropertiesCompute" + } + } + } + } +}; + +export const ScheduleTrigger: coreHttp.CompositeMapper = { + serializedName: "ScheduleTrigger", + type: { + name: "Composite", + className: "ScheduleTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...MultiplePipelineTrigger.type.modelProperties, + recurrence: { + serializedName: "typeProperties.recurrence", + type: { + name: "Composite", + className: "ScheduleTriggerRecurrence" + } + } + } + } +}; + +export const BlobTrigger: coreHttp.CompositeMapper = { + serializedName: "BlobTrigger", + type: { + name: "Composite", + className: "BlobTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...MultiplePipelineTrigger.type.modelProperties, + folderPath: { + serializedName: "typeProperties.folderPath", + required: true, + type: { + name: "String" + } + }, + maxConcurrency: { + serializedName: "typeProperties.maxConcurrency", + required: true, + type: { + name: "Number" + } + }, + linkedService: { + serializedName: "typeProperties.linkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + } +}; + +export const BlobEventsTrigger: coreHttp.CompositeMapper = { + serializedName: "BlobEventsTrigger", + type: { + name: "Composite", + className: "BlobEventsTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { 
+ ...MultiplePipelineTrigger.type.modelProperties, + blobPathBeginsWith: { + serializedName: "typeProperties.blobPathBeginsWith", + type: { + name: "String" + } + }, + blobPathEndsWith: { + serializedName: "typeProperties.blobPathEndsWith", + type: { + name: "String" + } + }, + ignoreEmptyBlobs: { + serializedName: "typeProperties.ignoreEmptyBlobs", + type: { + name: "Boolean" + } + }, + events: { + serializedName: "typeProperties.events", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + scope: { + serializedName: "typeProperties.scope", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const AzureTableSource: coreHttp.CompositeMapper = { + serializedName: "AzureTableSource", + type: { + name: "Composite", + className: "AzureTableSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + azureTableSourceQuery: { + serializedName: "azureTableSourceQuery", + type: { + name: "any" + } + }, + azureTableSourceIgnoreTableNotFound: { + serializedName: "azureTableSourceIgnoreTableNotFound", + type: { + name: "any" + } + } + } + } +}; + +export const InformixSource: coreHttp.CompositeMapper = { + serializedName: "InformixSource", + type: { + name: "Composite", + className: "InformixSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const Db2Source: coreHttp.CompositeMapper = { + serializedName: "Db2Source", + type: { + name: "Composite", + className: "Db2Source", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const OdbcSource: coreHttp.CompositeMapper = { + serializedName: "OdbcSource", + type: { + name: "Composite", + className: "OdbcSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MySqlSource: coreHttp.CompositeMapper = { + serializedName: "MySqlSource", + type: { + name: "Composite", + className: "MySqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const PostgreSqlSource: coreHttp.CompositeMapper = { + serializedName: "PostgreSqlSource", + type: { + name: "Composite", + className: "PostgreSqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SybaseSource: coreHttp.CompositeMapper = { + serializedName: "SybaseSource", + type: { + name: "Composite", + className: "SybaseSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export 
const SapBwSource: coreHttp.CompositeMapper = { + serializedName: "SapBwSource", + type: { + name: "Composite", + className: "SapBwSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceSource: coreHttp.CompositeMapper = { + serializedName: "SalesforceSource", + type: { + name: "Composite", + className: "SalesforceSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + readBehavior: { + serializedName: "readBehavior", + type: { + name: "String" + } + } + } + } +}; + +export const SapCloudForCustomerSource: coreHttp.CompositeMapper = { + serializedName: "SapCloudForCustomerSource", + type: { + name: "Composite", + className: "SapCloudForCustomerSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SapEccSource: coreHttp.CompositeMapper = { + serializedName: "SapEccSource", + type: { + name: "Composite", + className: "SapEccSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SapHanaSource: coreHttp.CompositeMapper = { + serializedName: 
"SapHanaSource", + type: { + name: "Composite", + className: "SapHanaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + packetSize: { + serializedName: "packetSize", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "SapHanaPartitionSettings" + } + } + } + } +}; + +export const SapOpenHubSource: coreHttp.CompositeMapper = { + serializedName: "SapOpenHubSource", + type: { + name: "Composite", + className: "SapOpenHubSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + excludeLastRequest: { + serializedName: "excludeLastRequest", + type: { + name: "any" + } + }, + baseRequestId: { + serializedName: "baseRequestId", + type: { + name: "any" + } + } + } + } +}; + +export const SapTableSource: coreHttp.CompositeMapper = { + serializedName: "SapTableSource", + type: { + name: "Composite", + className: "SapTableSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + rowCount: { + serializedName: "rowCount", + type: { + name: "any" + } + }, + rowSkips: { + serializedName: "rowSkips", + type: { + name: "any" + } + }, + rfcTableFields: { + serializedName: "rfcTableFields", + type: { + name: "any" + } + }, + rfcTableOptions: { + serializedName: "rfcTableOptions", + type: { + name: "any" + } + }, + batchSize: { + 
serializedName: "batchSize", + type: { + name: "any" + } + }, + customRfcReadTableFunctionModule: { + serializedName: "customRfcReadTableFunctionModule", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "SapTablePartitionSettings" + } + } + } + } +}; + +export const SqlSource: coreHttp.CompositeMapper = { + serializedName: "SqlSource", + type: { + name: "Composite", + className: "SqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + } + } + } +}; + +export const SqlServerSource: coreHttp.CompositeMapper = { + serializedName: "SqlServerSource", + type: { + name: "Composite", + className: "SqlServerSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + 
produceAdditionalTypes: { + serializedName: "produceAdditionalTypes", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlSource: coreHttp.CompositeMapper = { + serializedName: "AzureSqlSource", + type: { + name: "Composite", + className: "AzureSqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + produceAdditionalTypes: { + serializedName: "produceAdditionalTypes", + type: { + name: "any" + } + } + } + } +}; + +export const SqlMISource: coreHttp.CompositeMapper = { + serializedName: "SqlMISource", + type: { + name: "Composite", + className: "SqlMISource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + produceAdditionalTypes: { + serializedName: "produceAdditionalTypes", + type: { + name: "any" + } + } + } + } +}; + +export const SqlDWSource: coreHttp.CompositeMapper = { + serializedName: "SqlDWSource", + type: 
{ + name: "Composite", + className: "SqlDWSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMySqlSource: coreHttp.CompositeMapper = { + serializedName: "AzureMySqlSource", + type: { + name: "Composite", + className: "AzureMySqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const TeradataSource: coreHttp.CompositeMapper = { + serializedName: "TeradataSource", + type: { + name: "Composite", + className: "TeradataSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "TeradataPartitionSettings" + } + } + } + } +}; + +export const CassandraSource: coreHttp.CompositeMapper = { + serializedName: "CassandraSource", + type: { + name: "Composite", + className: "CassandraSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + consistencyLevel: { + serializedName: "consistencyLevel", + type: { + name: "String" + } + } + } + } +}; + +export const AmazonMWSSource: coreHttp.CompositeMapper = { + serializedName: "AmazonMWSSource", + type: { + name: "Composite", + className: "AmazonMWSSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const AzurePostgreSqlSource: coreHttp.CompositeMapper = { + serializedName: "AzurePostgreSqlSource", + type: { + name: "Composite", + className: "AzurePostgreSqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ConcurSource: coreHttp.CompositeMapper = { + serializedName: "ConcurSource", + type: { + name: "Composite", + className: "ConcurSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const CouchbaseSource: coreHttp.CompositeMapper = { + serializedName: "CouchbaseSource", + type: { + name: "Composite", + className: "CouchbaseSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const DrillSource: coreHttp.CompositeMapper = { + serializedName: "DrillSource", + type: { + name: "Composite", + className: "DrillSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const EloquaSource: coreHttp.CompositeMapper = { + serializedName: "EloquaSource", + type: { + name: "Composite", + className: "EloquaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleBigQuerySource: coreHttp.CompositeMapper = { + serializedName: "GoogleBigQuerySource", + type: { + name: "Composite", + className: "GoogleBigQuerySource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const GreenplumSource: coreHttp.CompositeMapper = { + serializedName: "GreenplumSource", + type: { + name: "Composite", + className: "GreenplumSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } 
+ } + } +}; + +export const HBaseSource: coreHttp.CompositeMapper = { + serializedName: "HBaseSource", + type: { + name: "Composite", + className: "HBaseSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const HiveSource: coreHttp.CompositeMapper = { + serializedName: "HiveSource", + type: { + name: "Composite", + className: "HiveSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const HubspotSource: coreHttp.CompositeMapper = { + serializedName: "HubspotSource", + type: { + name: "Composite", + className: "HubspotSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ImpalaSource: coreHttp.CompositeMapper = { + serializedName: "ImpalaSource", + type: { + name: "Composite", + className: "ImpalaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const JiraSource: coreHttp.CompositeMapper = { + serializedName: "JiraSource", + type: { + name: "Composite", + className: "JiraSource", + uberParent: "CopySource", + 
additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MagentoSource: coreHttp.CompositeMapper = { + serializedName: "MagentoSource", + type: { + name: "Composite", + className: "MagentoSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MariaDBSource: coreHttp.CompositeMapper = { + serializedName: "MariaDBSource", + type: { + name: "Composite", + className: "MariaDBSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMariaDBSource: coreHttp.CompositeMapper = { + serializedName: "AzureMariaDBSource", + type: { + name: "Composite", + className: "AzureMariaDBSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MarketoSource: coreHttp.CompositeMapper = { + serializedName: "MarketoSource", + type: { + name: "Composite", + className: "MarketoSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + 
...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const PaypalSource: coreHttp.CompositeMapper = { + serializedName: "PaypalSource", + type: { + name: "Composite", + className: "PaypalSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const PhoenixSource: coreHttp.CompositeMapper = { + serializedName: "PhoenixSource", + type: { + name: "Composite", + className: "PhoenixSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const PrestoSource: coreHttp.CompositeMapper = { + serializedName: "PrestoSource", + type: { + name: "Composite", + className: "PrestoSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const QuickBooksSource: coreHttp.CompositeMapper = { + serializedName: "QuickBooksSource", + type: { + name: "Composite", + className: "QuickBooksSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ServiceNowSource: coreHttp.CompositeMapper = { + 
serializedName: "ServiceNowSource", + type: { + name: "Composite", + className: "ServiceNowSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ShopifySource: coreHttp.CompositeMapper = { + serializedName: "ShopifySource", + type: { + name: "Composite", + className: "ShopifySource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SparkSource: coreHttp.CompositeMapper = { + serializedName: "SparkSource", + type: { + name: "Composite", + className: "SparkSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SquareSource: coreHttp.CompositeMapper = { + serializedName: "SquareSource", + type: { + name: "Composite", + className: "SquareSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const XeroSource: coreHttp.CompositeMapper = { + serializedName: "XeroSource", + type: { + name: "Composite", + className: "XeroSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ZohoSource: coreHttp.CompositeMapper = { + serializedName: "ZohoSource", + type: { + name: "Composite", + className: "ZohoSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const NetezzaSource: coreHttp.CompositeMapper = { + serializedName: "NetezzaSource", + type: { + name: "Composite", + className: "NetezzaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "NetezzaPartitionSettings" + } + } + } + } +}; + +export const VerticaSource: coreHttp.CompositeMapper = { + serializedName: "VerticaSource", + type: { + name: "Composite", + className: "VerticaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceMarketingCloudSource: coreHttp.CompositeMapper = { + serializedName: "SalesforceMarketingCloudSource", + type: { + name: "Composite", + className: "SalesforceMarketingCloudSource", + uberParent: 
"CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ResponsysSource: coreHttp.CompositeMapper = { + serializedName: "ResponsysSource", + type: { + name: "Composite", + className: "ResponsysSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsAXSource: coreHttp.CompositeMapper = { + serializedName: "DynamicsAXSource", + type: { + name: "Composite", + className: "DynamicsAXSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const OracleServiceCloudSource: coreHttp.CompositeMapper = { + serializedName: "OracleServiceCloudSource", + type: { + name: "Composite", + className: "OracleServiceCloudSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleAdWordsSource: coreHttp.CompositeMapper = { + serializedName: "GoogleAdWordsSource", + type: { + name: "Composite", + className: "GoogleAdWordsSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonRedshiftSource: coreHttp.CompositeMapper = { + serializedName: "AmazonRedshiftSource", + type: { + name: "Composite", + className: "AmazonRedshiftSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + redshiftUnloadSettings: { + serializedName: "redshiftUnloadSettings", + type: { + name: "Composite", + className: "RedshiftUnloadSettings" + } + } + } + } +}; + +export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper = { + serializedName: "TumblingWindowTriggerDependencyReference", + type: { + name: "Composite", + className: "TumblingWindowTriggerDependencyReference", + uberParent: "DependencyReference", + polymorphicDiscriminator: DependencyReference.type.polymorphicDiscriminator, + modelProperties: { + ...TriggerDependencyReference.type.modelProperties, + offset: { + constraints: { + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), + MaxLength: 15, + MinLength: 8 + }, + serializedName: "offset", + type: { + name: "String" + } + }, + size: { + constraints: { + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), + MaxLength: 15, + MinLength: 8 + }, + serializedName: "size", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugSessionCreateDataFlowDebugSessionHeaders: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugSessionCreateDataFlowDebugSessionHeaders", + modelProperties: { + location: { + serializedName: "location", + type: { + name: "String" + } + } + } + } +}; + +export const 
DataFlowDebugSessionExecuteCommandHeaders: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugSessionExecuteCommandHeaders", + modelProperties: { + location: { + serializedName: "location", + type: { + name: "String" + } + } + } + } +}; + +export let discriminators = { + LinkedService: LinkedService, + Dataset: Dataset, + Activity: Activity, + Trigger: Trigger, + DataFlow: DataFlow, + IntegrationRuntime: IntegrationRuntime, + SecretBase: SecretBase, + DatasetLocation: DatasetLocation, + DatasetStorageFormat: DatasetStorageFormat, + DatasetCompression: DatasetCompression, + WebLinkedServiceTypeProperties: WebLinkedServiceTypeProperties, + StoreReadSettings: StoreReadSettings, + StoreWriteSettings: StoreWriteSettings, + FormatReadSettings: FormatReadSettings, + FormatWriteSettings: FormatWriteSettings, + CopySource: CopySource, + CopySink: CopySink, + CopyTranslator: CopyTranslator, + DependencyReference: DependencyReference, + "CustomSetupBase.undefined": CustomSetupBase, + LinkedIntegrationRuntimeType: LinkedIntegrationRuntimeType, + "LinkedService.AzureStorage": AzureStorageLinkedService, + "LinkedService.AzureBlobStorage": AzureBlobStorageLinkedService, + "LinkedService.AzureTableStorage": AzureTableStorageLinkedService, + "LinkedService.AzureSqlDW": AzureSqlDWLinkedService, + "LinkedService.SqlServer": SqlServerLinkedService, + "LinkedService.AzureSqlDatabase": AzureSqlDatabaseLinkedService, + "LinkedService.AzureSqlMI": AzureSqlMILinkedService, + "LinkedService.AzureBatch": AzureBatchLinkedService, + "LinkedService.AzureKeyVault": AzureKeyVaultLinkedService, + "LinkedService.CosmosDb": CosmosDbLinkedService, + "LinkedService.Dynamics": DynamicsLinkedService, + "LinkedService.DynamicsCrm": DynamicsCrmLinkedService, + "LinkedService.CommonDataServiceForApps": CommonDataServiceForAppsLinkedService, + "LinkedService.HDInsight": HDInsightLinkedService, + "LinkedService.FileServer": FileServerLinkedService, + 
"LinkedService.AzureFileStorage": AzureFileStorageLinkedService, + "LinkedService.GoogleCloudStorage": GoogleCloudStorageLinkedService, + "LinkedService.Oracle": OracleLinkedService, + "LinkedService.AzureMySql": AzureMySqlLinkedService, + "LinkedService.MySql": MySqlLinkedService, + "LinkedService.PostgreSql": PostgreSqlLinkedService, + "LinkedService.Sybase": SybaseLinkedService, + "LinkedService.Db2": Db2LinkedService, + "LinkedService.Teradata": TeradataLinkedService, + "LinkedService.AzureML": AzureMLLinkedService, + "LinkedService.AzureMLService": AzureMLServiceLinkedService, + "LinkedService.Odbc": OdbcLinkedService, + "LinkedService.Informix": InformixLinkedService, + "LinkedService.MicrosoftAccess": MicrosoftAccessLinkedService, + "LinkedService.Hdfs": HdfsLinkedService, + "LinkedService.OData": ODataLinkedService, + "LinkedService.Web": WebLinkedService, + "LinkedService.Cassandra": CassandraLinkedService, + "LinkedService.MongoDb": MongoDbLinkedService, + "LinkedService.MongoDbV2": MongoDbV2LinkedService, + "LinkedService.CosmosDbMongoDbApi": CosmosDbMongoDbApiLinkedService, + "LinkedService.AzureDataLakeStore": AzureDataLakeStoreLinkedService, + "LinkedService.AzureBlobFS": AzureBlobFSLinkedService, + "LinkedService.Office365": Office365LinkedService, + "LinkedService.Salesforce": SalesforceLinkedService, + "LinkedService.SalesforceServiceCloud": SalesforceServiceCloudLinkedService, + "LinkedService.SapCloudForCustomer": SapCloudForCustomerLinkedService, + "LinkedService.SapEcc": SapEccLinkedService, + "LinkedService.SapOpenHub": SapOpenHubLinkedService, + "LinkedService.RestService": RestServiceLinkedService, + "LinkedService.AmazonS3": AmazonS3LinkedService, + "LinkedService.AmazonRedshift": AmazonRedshiftLinkedService, + "LinkedService.CustomDataSource": CustomDataSourceLinkedService, + "LinkedService.AzureSearch": AzureSearchLinkedService, + "LinkedService.HttpServer": HttpLinkedService, + "LinkedService.FtpServer": FtpServerLinkedService, + 
"LinkedService.Sftp": SftpServerLinkedService, + "LinkedService.SapBW": SapBWLinkedService, + "LinkedService.SapHana": SapHanaLinkedService, + "LinkedService.AmazonMWS": AmazonMWSLinkedService, + "LinkedService.AzurePostgreSql": AzurePostgreSqlLinkedService, + "LinkedService.Concur": ConcurLinkedService, + "LinkedService.Couchbase": CouchbaseLinkedService, + "LinkedService.Drill": DrillLinkedService, + "LinkedService.Eloqua": EloquaLinkedService, + "LinkedService.GoogleBigQuery": GoogleBigQueryLinkedService, + "LinkedService.Greenplum": GreenplumLinkedService, + "LinkedService.HBase": HBaseLinkedService, + "LinkedService.Hive": HiveLinkedService, + "LinkedService.Hubspot": HubspotLinkedService, + "LinkedService.Impala": ImpalaLinkedService, + "LinkedService.Jira": JiraLinkedService, + "LinkedService.Magento": MagentoLinkedService, + "LinkedService.MariaDB": MariaDBLinkedService, + "LinkedService.AzureMariaDB": AzureMariaDBLinkedService, + "LinkedService.Marketo": MarketoLinkedService, + "LinkedService.Paypal": PaypalLinkedService, + "LinkedService.Phoenix": PhoenixLinkedService, + "LinkedService.Presto": PrestoLinkedService, + "LinkedService.QuickBooks": QuickBooksLinkedService, + "LinkedService.ServiceNow": ServiceNowLinkedService, + "LinkedService.Shopify": ShopifyLinkedService, + "LinkedService.Spark": SparkLinkedService, + "LinkedService.Square": SquareLinkedService, + "LinkedService.Xero": XeroLinkedService, + "LinkedService.Zoho": ZohoLinkedService, + "LinkedService.Vertica": VerticaLinkedService, + "LinkedService.Netezza": NetezzaLinkedService, + "LinkedService.SalesforceMarketingCloud": SalesforceMarketingCloudLinkedService, + "LinkedService.HDInsightOnDemand": HDInsightOnDemandLinkedService, + "LinkedService.AzureDataLakeAnalytics": AzureDataLakeAnalyticsLinkedService, + "LinkedService.AzureDatabricks": AzureDatabricksLinkedService, + "LinkedService.Responsys": ResponsysLinkedService, + "LinkedService.DynamicsAX": DynamicsAXLinkedService, + 
"LinkedService.OracleServiceCloud": OracleServiceCloudLinkedService, + "LinkedService.GoogleAdWords": GoogleAdWordsLinkedService, + "LinkedService.SapTable": SapTableLinkedService, + "LinkedService.AzureDataExplorer": AzureDataExplorerLinkedService, + "LinkedService.AzureFunction": AzureFunctionLinkedService, + "Dataset.Avro": AvroDataset, + "Dataset.Parquet": ParquetDataset, + "Dataset.DelimitedText": DelimitedTextDataset, + "Dataset.Json": JsonDataset, + "Dataset.Orc": OrcDataset, + "Dataset.Binary": BinaryDataset, + "Dataset.AzureTable": AzureTableDataset, + "Dataset.AzureSqlTable": AzureSqlTableDataset, + "Dataset.AzureSqlMITable": AzureSqlMITableDataset, + "Dataset.AzureSqlDWTable": AzureSqlDWTableDataset, + "Dataset.CassandraTable": CassandraTableDataset, + "Dataset.CustomDataset": CustomDataset, + "Dataset.CosmosDbSqlApiCollection": CosmosDbSqlApiCollectionDataset, + "Dataset.DocumentDbCollection": DocumentDbCollectionDataset, + "Dataset.DynamicsEntity": DynamicsEntityDataset, + "Dataset.DynamicsCrmEntity": DynamicsCrmEntityDataset, + "Dataset.CommonDataServiceForAppsEntity": CommonDataServiceForAppsEntityDataset, + "Dataset.Office365Table": Office365Dataset, + "Dataset.MongoDbCollection": MongoDbCollectionDataset, + "Dataset.MongoDbV2Collection": MongoDbV2CollectionDataset, + "Dataset.CosmosDbMongoDbApiCollection": CosmosDbMongoDbApiCollectionDataset, + "Dataset.ODataResource": ODataResourceDataset, + "Dataset.OracleTable": OracleTableDataset, + "Dataset.TeradataTable": TeradataTableDataset, + "Dataset.AzureMySqlTable": AzureMySqlTableDataset, + "Dataset.AmazonRedshiftTable": AmazonRedshiftTableDataset, + "Dataset.Db2Table": Db2TableDataset, + "Dataset.RelationalTable": RelationalTableDataset, + "Dataset.InformixTable": InformixTableDataset, + "Dataset.OdbcTable": OdbcTableDataset, + "Dataset.MySqlTable": MySqlTableDataset, + "Dataset.PostgreSqlTable": PostgreSqlTableDataset, + "Dataset.MicrosoftAccessTable": MicrosoftAccessTableDataset, + 
"Dataset.SalesforceObject": SalesforceObjectDataset, + "Dataset.SalesforceServiceCloudObject": SalesforceServiceCloudObjectDataset, + "Dataset.SybaseTable": SybaseTableDataset, + "Dataset.SapBwCube": SapBwCubeDataset, + "Dataset.SapCloudForCustomerResource": SapCloudForCustomerResourceDataset, + "Dataset.SapEccResource": SapEccResourceDataset, + "Dataset.SapHanaTable": SapHanaTableDataset, + "Dataset.SapOpenHubTable": SapOpenHubTableDataset, + "Dataset.SqlServerTable": SqlServerTableDataset, + "Dataset.RestResource": RestResourceDataset, + "Dataset.SapTableResource": SapTableResourceDataset, + "Dataset.WebTable": WebTableDataset, + "Dataset.AzureSearchIndex": AzureSearchIndexDataset, + "Dataset.AmazonMWSObject": AmazonMWSObjectDataset, + "Dataset.AzurePostgreSqlTable": AzurePostgreSqlTableDataset, + "Dataset.ConcurObject": ConcurObjectDataset, + "Dataset.CouchbaseTable": CouchbaseTableDataset, + "Dataset.DrillTable": DrillTableDataset, + "Dataset.EloquaObject": EloquaObjectDataset, + "Dataset.GoogleBigQueryObject": GoogleBigQueryObjectDataset, + "Dataset.GreenplumTable": GreenplumTableDataset, + "Dataset.HBaseObject": HBaseObjectDataset, + "Dataset.HiveObject": HiveObjectDataset, + "Dataset.HubspotObject": HubspotObjectDataset, + "Dataset.ImpalaObject": ImpalaObjectDataset, + "Dataset.JiraObject": JiraObjectDataset, + "Dataset.MagentoObject": MagentoObjectDataset, + "Dataset.MariaDBTable": MariaDBTableDataset, + "Dataset.AzureMariaDBTable": AzureMariaDBTableDataset, + "Dataset.MarketoObject": MarketoObjectDataset, + "Dataset.PaypalObject": PaypalObjectDataset, + "Dataset.PhoenixObject": PhoenixObjectDataset, + "Dataset.PrestoObject": PrestoObjectDataset, + "Dataset.QuickBooksObject": QuickBooksObjectDataset, + "Dataset.ServiceNowObject": ServiceNowObjectDataset, + "Dataset.ShopifyObject": ShopifyObjectDataset, + "Dataset.SparkObject": SparkObjectDataset, + "Dataset.SquareObject": SquareObjectDataset, + "Dataset.XeroObject": XeroObjectDataset, + 
"Dataset.ZohoObject": ZohoObjectDataset, + "Dataset.NetezzaTable": NetezzaTableDataset, + "Dataset.VerticaTable": VerticaTableDataset, + "Dataset.SalesforceMarketingCloudObject": SalesforceMarketingCloudObjectDataset, + "Dataset.ResponsysObject": ResponsysObjectDataset, + "Dataset.DynamicsAXResource": DynamicsAXResourceDataset, + "Dataset.OracleServiceCloudObject": OracleServiceCloudObjectDataset, + "Dataset.AzureDataExplorerTable": AzureDataExplorerTableDataset, + "Dataset.GoogleAdWordsObject": GoogleAdWordsObjectDataset, + "Activity.Container": ControlActivity, + "Activity.Execution": ExecutionActivity, + "Activity.ExecutePipeline": ExecutePipelineActivity, + "Activity.IfCondition": IfConditionActivity, + "Activity.Switch": SwitchActivity, + "Activity.ForEach": ForEachActivity, + "Activity.Wait": WaitActivity, + "Activity.Until": UntilActivity, + "Activity.Validation": ValidationActivity, + "Activity.Filter": FilterActivity, + "Activity.SetVariable": SetVariableActivity, + "Activity.AppendVariable": AppendVariableActivity, + "Activity.WebHook": WebHookActivity, + "Activity.SynapseNotebook": SynapseNotebookActivity, + "Activity.SparkJob": SynapseSparkJobDefinitionActivity, + "Activity.SqlPoolStoredProcedure": SqlPoolStoredProcedureActivity, + "Trigger.RerunTumblingWindowTrigger": RerunTumblingWindowTrigger, + "Trigger.MultiplePipelineTrigger": MultiplePipelineTrigger, + "Trigger.TumblingWindowTrigger": TumblingWindowTrigger, + "Trigger.ChainingTrigger": ChainingTrigger, + "DataFlow.MappingDataFlow": MappingDataFlow, + "IntegrationRuntime.Managed": ManagedIntegrationRuntime, + "IntegrationRuntime.SelfHosted": SelfHostedIntegrationRuntime, + "SecretBase.SecureString": SecureString, + "SecretBase.AzureKeyVaultSecret": AzureKeyVaultSecretReference, + "DatasetLocation.AzureBlobStorageLocation": AzureBlobStorageLocation, + "DatasetLocation.AzureBlobFSLocation": AzureBlobFSLocation, + "DatasetLocation.AzureDataLakeStoreLocation": AzureDataLakeStoreLocation, + 
"DatasetLocation.AmazonS3Location": AmazonS3Location, + "DatasetLocation.FileServerLocation": FileServerLocation, + "DatasetLocation.AzureFileStorageLocation": AzureFileStorageLocation, + "DatasetLocation.GoogleCloudStorageLocation": GoogleCloudStorageLocation, + "DatasetLocation.FtpServerLocation": FtpServerLocation, + "DatasetLocation.SftpLocation": SftpLocation, + "DatasetLocation.HttpServerLocation": HttpServerLocation, + "DatasetLocation.HdfsLocation": HdfsLocation, + "DatasetStorageFormat.TextFormat": TextFormat, + "DatasetStorageFormat.JsonFormat": JsonFormat, + "DatasetStorageFormat.AvroFormat": AvroFormat, + "DatasetStorageFormat.OrcFormat": OrcFormat, + "DatasetStorageFormat.ParquetFormat": ParquetFormat, + "DatasetCompression.BZip2": DatasetBZip2Compression, + "DatasetCompression.GZip": DatasetGZipCompression, + "DatasetCompression.Deflate": DatasetDeflateCompression, + "DatasetCompression.ZipDeflate": DatasetZipDeflateCompression, + "WebLinkedServiceTypeProperties.Anonymous": WebAnonymousAuthentication, + "WebLinkedServiceTypeProperties.Basic": WebBasicAuthentication, + "WebLinkedServiceTypeProperties.ClientCertificate": WebClientCertificateAuthentication, + "StoreReadSettings.AzureBlobStorageReadSettings": AzureBlobStorageReadSettings, + "StoreReadSettings.AzureBlobFSReadSettings": AzureBlobFSReadSettings, + "StoreReadSettings.AzureDataLakeStoreReadSettings": AzureDataLakeStoreReadSettings, + "StoreReadSettings.AmazonS3ReadSettings": AmazonS3ReadSettings, + "StoreReadSettings.FileServerReadSettings": FileServerReadSettings, + "StoreReadSettings.AzureFileStorageReadSettings": AzureFileStorageReadSettings, + "StoreReadSettings.GoogleCloudStorageReadSettings": GoogleCloudStorageReadSettings, + "StoreReadSettings.FtpReadSettings": FtpReadSettings, + "StoreReadSettings.SftpReadSettings": SftpReadSettings, + "StoreReadSettings.HttpReadSettings": HttpReadSettings, + "StoreReadSettings.HdfsReadSettings": HdfsReadSettings, + 
"StoreWriteSettings.SftpWriteSettings": SftpWriteSettings, + "StoreWriteSettings.AzureBlobStorageWriteSettings": AzureBlobStorageWriteSettings, + "StoreWriteSettings.AzureBlobFSWriteSettings": AzureBlobFSWriteSettings, + "StoreWriteSettings.AzureDataLakeStoreWriteSettings": AzureDataLakeStoreWriteSettings, + "StoreWriteSettings.FileServerWriteSettings": FileServerWriteSettings, + "FormatReadSettings.DelimitedTextReadSettings": DelimitedTextReadSettings, + "FormatWriteSettings.AvroWriteSettings": AvroWriteSettings, + "FormatWriteSettings.DelimitedTextWriteSettings": DelimitedTextWriteSettings, + "FormatWriteSettings.JsonWriteSettings": JsonWriteSettings, + "CopySource.AvroSource": AvroSource, + "CopySource.ParquetSource": ParquetSource, + "CopySource.DelimitedTextSource": DelimitedTextSource, + "CopySource.JsonSource": JsonSource, + "CopySource.OrcSource": OrcSource, + "CopySource.BinarySource": BinarySource, + "CopySource.TabularSource": TabularSource, + "CopySource.BlobSource": BlobSource, + "CopySource.DocumentDbCollectionSource": DocumentDbCollectionSource, + "CopySource.CosmosDbSqlApiSource": CosmosDbSqlApiSource, + "CopySource.DynamicsSource": DynamicsSource, + "CopySource.DynamicsCrmSource": DynamicsCrmSource, + "CopySource.CommonDataServiceForAppsSource": CommonDataServiceForAppsSource, + "CopySource.RelationalSource": RelationalSource, + "CopySource.MicrosoftAccessSource": MicrosoftAccessSource, + "CopySource.ODataSource": ODataSource, + "CopySource.SalesforceServiceCloudSource": SalesforceServiceCloudSource, + "CopySource.RestSource": RestSource, + "CopySource.FileSystemSource": FileSystemSource, + "CopySource.HdfsSource": HdfsSource, + "CopySource.AzureDataExplorerSource": AzureDataExplorerSource, + "CopySource.OracleSource": OracleSource, + "CopySource.WebSource": WebSource, + "CopySource.MongoDbSource": MongoDbSource, + "CopySource.MongoDbV2Source": MongoDbV2Source, + "CopySource.CosmosDbMongoDbApiSource": CosmosDbMongoDbApiSource, + 
"CopySource.Office365Source": Office365Source, + "CopySource.AzureDataLakeStoreSource": AzureDataLakeStoreSource, + "CopySource.AzureBlobFSSource": AzureBlobFSSource, + "CopySource.HttpSource": HttpSource, + "CopySink.DelimitedTextSink": DelimitedTextSink, + "CopySink.JsonSink": JsonSink, + "CopySink.OrcSink": OrcSink, + "CopySink.AzurePostgreSqlSink": AzurePostgreSqlSink, + "CopySink.AzureMySqlSink": AzureMySqlSink, + "CopySink.SapCloudForCustomerSink": SapCloudForCustomerSink, + "CopySink.AzureQueueSink": AzureQueueSink, + "CopySink.AzureTableSink": AzureTableSink, + "CopySink.AvroSink": AvroSink, + "CopySink.ParquetSink": ParquetSink, + "CopySink.BinarySink": BinarySink, + "CopySink.BlobSink": BlobSink, + "CopySink.FileSystemSink": FileSystemSink, + "CopySink.DocumentDbCollectionSink": DocumentDbCollectionSink, + "CopySink.CosmosDbSqlApiSink": CosmosDbSqlApiSink, + "CopySink.SqlSink": SqlSink, + "CopySink.SqlServerSink": SqlServerSink, + "CopySink.AzureSqlSink": AzureSqlSink, + "CopySink.SqlMISink": SqlMISink, + "CopySink.SqlDWSink": SqlDWSink, + "CopySink.OracleSink": OracleSink, + "CopySink.AzureDataLakeStoreSink": AzureDataLakeStoreSink, + "CopySink.AzureBlobFSSink": AzureBlobFSSink, + "CopySink.AzureSearchIndexSink": AzureSearchIndexSink, + "CopySink.OdbcSink": OdbcSink, + "CopySink.InformixSink": InformixSink, + "CopySink.MicrosoftAccessSink": MicrosoftAccessSink, + "CopySink.DynamicsSink": DynamicsSink, + "CopySink.DynamicsCrmSink": DynamicsCrmSink, + "CopySink.CommonDataServiceForAppsSink": CommonDataServiceForAppsSink, + "CopySink.AzureDataExplorerSink": AzureDataExplorerSink, + "CopySink.SalesforceSink": SalesforceSink, + "CopySink.SalesforceServiceCloudSink": SalesforceServiceCloudSink, + "CopySink.CosmosDbMongoDbApiSink": CosmosDbMongoDbApiSink, + "CopyTranslator.TabularTranslator": TabularTranslator, + "DependencyReference.TriggerDependencyReference": TriggerDependencyReference, + "DependencyReference.SelfDependencyTumblingWindowTriggerReference": 
SelfDependencyTumblingWindowTriggerReference, + "LinkedIntegrationRuntimeType.Key": LinkedIntegrationRuntimeKeyAuthorization, + "LinkedIntegrationRuntimeType.RBAC": LinkedIntegrationRuntimeRbacAuthorization, + "Activity.Copy": CopyActivity, + "Activity.HDInsightHive": HDInsightHiveActivity, + "Activity.HDInsightPig": HDInsightPigActivity, + "Activity.HDInsightMapReduce": HDInsightMapReduceActivity, + "Activity.HDInsightStreaming": HDInsightStreamingActivity, + "Activity.HDInsightSpark": HDInsightSparkActivity, + "Activity.ExecuteSSISPackage": ExecuteSsisPackageActivity, + "Activity.Custom": CustomActivity, + "Activity.SqlServerStoredProcedure": SqlServerStoredProcedureActivity, + "Activity.Delete": DeleteActivity, + "Activity.AzureDataExplorerCommand": AzureDataExplorerCommandActivity, + "Activity.Lookup": LookupActivity, + "Activity.WebActivity": WebActivity, + "Activity.GetMetadata": GetMetadataActivity, + "Activity.AzureMLBatchExecution": AzureMLBatchExecutionActivity, + "Activity.AzureMLUpdateResource": AzureMLUpdateResourceActivity, + "Activity.AzureMLExecutePipeline": AzureMLExecutePipelineActivity, + "Activity.DataLakeAnalyticsU-SQL": DataLakeAnalyticsUsqlActivity, + "Activity.DatabricksNotebook": DatabricksNotebookActivity, + "Activity.DatabricksSparkJar": DatabricksSparkJarActivity, + "Activity.DatabricksSparkPython": DatabricksSparkPythonActivity, + "Activity.AzureFunctionActivity": AzureFunctionActivity, + "Activity.ExecuteDataFlow": ExecuteDataFlowActivity, + "Trigger.ScheduleTrigger": ScheduleTrigger, + "Trigger.BlobTrigger": BlobTrigger, + "Trigger.BlobEventsTrigger": BlobEventsTrigger, + "CopySource.AzureTableSource": AzureTableSource, + "CopySource.InformixSource": InformixSource, + "CopySource.Db2Source": Db2Source, + "CopySource.OdbcSource": OdbcSource, + "CopySource.MySqlSource": MySqlSource, + "CopySource.PostgreSqlSource": PostgreSqlSource, + "CopySource.SybaseSource": SybaseSource, + "CopySource.SapBwSource": SapBwSource, + 
"CopySource.SalesforceSource": SalesforceSource, + "CopySource.SapCloudForCustomerSource": SapCloudForCustomerSource, + "CopySource.SapEccSource": SapEccSource, + "CopySource.SapHanaSource": SapHanaSource, + "CopySource.SapOpenHubSource": SapOpenHubSource, + "CopySource.SapTableSource": SapTableSource, + "CopySource.SqlSource": SqlSource, + "CopySource.SqlServerSource": SqlServerSource, + "CopySource.AzureSqlSource": AzureSqlSource, + "CopySource.SqlMISource": SqlMISource, + "CopySource.SqlDWSource": SqlDWSource, + "CopySource.AzureMySqlSource": AzureMySqlSource, + "CopySource.TeradataSource": TeradataSource, + "CopySource.CassandraSource": CassandraSource, + "CopySource.AmazonMWSSource": AmazonMWSSource, + "CopySource.AzurePostgreSqlSource": AzurePostgreSqlSource, + "CopySource.ConcurSource": ConcurSource, + "CopySource.CouchbaseSource": CouchbaseSource, + "CopySource.DrillSource": DrillSource, + "CopySource.EloquaSource": EloquaSource, + "CopySource.GoogleBigQuerySource": GoogleBigQuerySource, + "CopySource.GreenplumSource": GreenplumSource, + "CopySource.HBaseSource": HBaseSource, + "CopySource.HiveSource": HiveSource, + "CopySource.HubspotSource": HubspotSource, + "CopySource.ImpalaSource": ImpalaSource, + "CopySource.JiraSource": JiraSource, + "CopySource.MagentoSource": MagentoSource, + "CopySource.MariaDBSource": MariaDBSource, + "CopySource.AzureMariaDBSource": AzureMariaDBSource, + "CopySource.MarketoSource": MarketoSource, + "CopySource.PaypalSource": PaypalSource, + "CopySource.PhoenixSource": PhoenixSource, + "CopySource.PrestoSource": PrestoSource, + "CopySource.QuickBooksSource": QuickBooksSource, + "CopySource.ServiceNowSource": ServiceNowSource, + "CopySource.ShopifySource": ShopifySource, + "CopySource.SparkSource": SparkSource, + "CopySource.SquareSource": SquareSource, + "CopySource.XeroSource": XeroSource, + "CopySource.ZohoSource": ZohoSource, + "CopySource.NetezzaSource": NetezzaSource, + "CopySource.VerticaSource": VerticaSource, + 
"CopySource.SalesforceMarketingCloudSource": SalesforceMarketingCloudSource, + "CopySource.ResponsysSource": ResponsysSource, + "CopySource.DynamicsAXSource": DynamicsAXSource, + "CopySource.OracleServiceCloudSource": OracleServiceCloudSource, + "CopySource.GoogleAdWordsSource": GoogleAdWordsSource, + "CopySource.AmazonRedshiftSource": AmazonRedshiftSource, + "DependencyReference.TumblingWindowTriggerDependencyReference": TumblingWindowTriggerDependencyReference +}; diff --git a/sdk/synapse/synapse-artifacts/src/models/parameters.ts b/sdk/synapse/synapse-artifacts/src/models/parameters.ts new file mode 100644 index 000000000000..139f22e85a37 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/models/parameters.ts @@ -0,0 +1,400 @@ +import { + OperationParameter, + OperationURLParameter, + OperationQueryParameter +} from "@azure/core-http"; +import { + LinkedServiceResource as LinkedServiceResourceMapper, + ArtifactRenameRequest as ArtifactRenameRequestMapper, + DatasetResource as DatasetResourceMapper, + PipelineResource as PipelineResourceMapper, + RunFilterParameters as RunFilterParametersMapper, + TriggerResource as TriggerResourceMapper, + DataFlowResource as DataFlowResourceMapper, + CreateDataFlowDebugSessionRequest as CreateDataFlowDebugSessionRequestMapper, + DataFlowDebugPackage as DataFlowDebugPackageMapper, + DeleteDataFlowDebugSessionRequest as DeleteDataFlowDebugSessionRequestMapper, + DataFlowDebugCommandRequest as DataFlowDebugCommandRequestMapper, + SqlScriptResource as SqlScriptResourceMapper, + SparkJobDefinitionResource as SparkJobDefinitionResourceMapper, + NotebookResource as NotebookResourceMapper, + GitHubAccessTokenRequest as GitHubAccessTokenRequestMapper +} from "../models/mappers"; + +export const accept: OperationParameter = { + parameterPath: "accept", + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; + +export const endpoint: OperationURLParameter = 
{ + parameterPath: "endpoint", + mapper: { + serializedName: "endpoint", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const apiVersion: OperationQueryParameter = { + parameterPath: "apiVersion", + mapper: { + defaultValue: "2019-06-01-preview", + isConstant: true, + serializedName: "api-version", + type: { + name: "String" + } + } +}; + +export const contentType: OperationParameter = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; + +export const linkedService: OperationParameter = { + parameterPath: "linkedService", + mapper: LinkedServiceResourceMapper +}; + +export const linkedServiceName: OperationURLParameter = { + parameterPath: "linkedServiceName", + mapper: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "linkedServiceName", + required: true, + type: { + name: "String" + } + } +}; + +export const ifMatch: OperationParameter = { + parameterPath: ["options", "ifMatch"], + mapper: { + serializedName: "If-Match", + type: { + name: "String" + } + } +}; + +export const ifNoneMatch: OperationParameter = { + parameterPath: ["options", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + type: { + name: "String" + } + } +}; + +export const request: OperationParameter = { + parameterPath: "request", + mapper: ArtifactRenameRequestMapper +}; + +export const nextLink: OperationURLParameter = { + parameterPath: "nextLink", + mapper: { + serializedName: "nextLink", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const dataset: OperationParameter = { + parameterPath: "dataset", + mapper: DatasetResourceMapper +}; + +export const datasetName: OperationURLParameter = { + parameterPath: "datasetName", + mapper: { + constraints: { + Pattern: new 
RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "datasetName", + required: true, + type: { + name: "String" + } + } +}; + +export const pipeline: OperationParameter = { + parameterPath: "pipeline", + mapper: PipelineResourceMapper +}; + +export const pipelineName: OperationURLParameter = { + parameterPath: "pipelineName", + mapper: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "pipelineName", + required: true, + type: { + name: "String" + } + } +}; + +export const parameters: OperationParameter = { + parameterPath: ["options", "parameters"], + mapper: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } +}; + +export const referencePipelineRunId: OperationQueryParameter = { + parameterPath: ["options", "referencePipelineRunId"], + mapper: { + serializedName: "referencePipelineRunId", + type: { + name: "String" + } + } +}; + +export const isRecovery: OperationQueryParameter = { + parameterPath: ["options", "isRecovery"], + mapper: { + serializedName: "isRecovery", + type: { + name: "Boolean" + } + } +}; + +export const startActivityName: OperationQueryParameter = { + parameterPath: ["options", "startActivityName"], + mapper: { + serializedName: "startActivityName", + type: { + name: "String" + } + } +}; + +export const filterParameters: OperationParameter = { + parameterPath: "filterParameters", + mapper: RunFilterParametersMapper +}; + +export const runId: OperationURLParameter = { + parameterPath: "runId", + mapper: { + serializedName: "runId", + required: true, + type: { + name: "String" + } + } +}; + +export const isRecursive: OperationQueryParameter = { + parameterPath: ["options", "isRecursive"], + mapper: { + serializedName: "isRecursive", + type: { + name: "Boolean" + } + } +}; + +export const trigger: OperationParameter = { + parameterPath: "trigger", + mapper: 
TriggerResourceMapper +}; + +export const triggerName: OperationURLParameter = { + parameterPath: "triggerName", + mapper: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "triggerName", + required: true, + type: { + name: "String" + } + } +}; + +export const dataFlow: OperationParameter = { + parameterPath: "dataFlow", + mapper: DataFlowResourceMapper +}; + +export const dataFlowName: OperationURLParameter = { + parameterPath: "dataFlowName", + mapper: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "dataFlowName", + required: true, + type: { + name: "String" + } + } +}; + +export const request1: OperationParameter = { + parameterPath: "request", + mapper: CreateDataFlowDebugSessionRequestMapper +}; + +export const request2: OperationParameter = { + parameterPath: "request", + mapper: DataFlowDebugPackageMapper +}; + +export const request3: OperationParameter = { + parameterPath: "request", + mapper: DeleteDataFlowDebugSessionRequestMapper +}; + +export const request4: OperationParameter = { + parameterPath: "request", + mapper: DataFlowDebugCommandRequestMapper +}; + +export const sqlScript: OperationParameter = { + parameterPath: "sqlScript", + mapper: SqlScriptResourceMapper +}; + +export const sqlScriptName: OperationURLParameter = { + parameterPath: "sqlScriptName", + mapper: { + serializedName: "sqlScriptName", + required: true, + type: { + name: "String" + } + } +}; + +export const sparkJobDefinition: OperationParameter = { + parameterPath: "sparkJobDefinition", + mapper: SparkJobDefinitionResourceMapper +}; + +export const sparkJobDefinitionName: OperationURLParameter = { + parameterPath: "sparkJobDefinitionName", + mapper: { + serializedName: "sparkJobDefinitionName", + required: true, + type: { + name: "String" + } + } +}; + +export const sparkJobDefinitionAzureResource: 
OperationParameter = { + parameterPath: "sparkJobDefinitionAzureResource", + mapper: SparkJobDefinitionResourceMapper +}; + +export const notebook: OperationParameter = { + parameterPath: "notebook", + mapper: NotebookResourceMapper +}; + +export const notebookName: OperationURLParameter = { + parameterPath: "notebookName", + mapper: { + serializedName: "notebookName", + required: true, + type: { + name: "String" + } + } +}; + +export const sqlPoolName: OperationURLParameter = { + parameterPath: "sqlPoolName", + mapper: { + serializedName: "sqlPoolName", + required: true, + type: { + name: "String" + } + } +}; + +export const bigDataPoolName: OperationURLParameter = { + parameterPath: "bigDataPoolName", + mapper: { + serializedName: "bigDataPoolName", + required: true, + type: { + name: "String" + } + } +}; + +export const integrationRuntimeName: OperationURLParameter = { + parameterPath: "integrationRuntimeName", + mapper: { + serializedName: "integrationRuntimeName", + required: true, + type: { + name: "String" + } + } +}; + +export const gitHubAccessTokenRequest: OperationParameter = { + parameterPath: "gitHubAccessTokenRequest", + mapper: GitHubAccessTokenRequestMapper +}; + +export const clientRequestId: OperationParameter = { + parameterPath: ["options", "clientRequestId"], + mapper: { + serializedName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts new file mode 100644 index 000000000000..977767e2578f --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -0,0 +1,89 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { BigDataPoolsListResponse, BigDataPoolsGetResponse } from "../models"; + +/** + * Class representing a 
BigDataPools. + */ +export class BigDataPools { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class BigDataPools class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * List Big Data Pools + * @param options The options parameters. + */ + list(options?: coreHttp.OperationOptions): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; + } + + /** + * Get Big Data Pool + * @param bigDataPoolName The Big Data Pool name + * @param options The options parameters. + */ + get( + bigDataPoolName: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + bigDataPoolName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const listOperationSpec: coreHttp.OperationSpec = { + path: "/bigDataPools", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BigDataPoolResourceInfoListResult + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getOperationSpec: coreHttp.OperationSpec = { + path: "/bigDataPools/{bigDataPoolName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BigDataPoolResourceInfo + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.bigDataPoolName], + 
headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts new file mode 100644 index 000000000000..3dffca96470b --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -0,0 +1,370 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + DataFlowResource, + DataFlowCreateOrUpdateDataFlowOptionalParams, + DataFlowCreateOrUpdateDataFlowResponse, + DataFlowGetDataFlowOptionalParams, + DataFlowGetDataFlowResponse, + ArtifactRenameRequest, + DataFlowGetDataFlowsByWorkspaceResponse, + DataFlowGetDataFlowsByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a DataFlow. + */ +export class DataFlow { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class DataFlow class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists data flows. + * @param options The options parameters. 
+ */ + public listDataFlowsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getDataFlowsByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getDataFlowsByWorkspacePagingPage(options); + } + }; + } + + private async *getDataFlowsByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getDataFlowsByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getDataFlowsByWorkspaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getDataFlowsByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getDataFlowsByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Creates or updates a data flow. + * @param dataFlowName The data flow name. + * @param dataFlow Data flow resource definition. + * @param options The options parameters. 
+ */ + async createOrUpdateDataFlow( + dataFlowName: string, + dataFlow: DataFlowResource, + options?: DataFlowCreateOrUpdateDataFlowOptionalParams + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + dataFlowName, + dataFlow, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + DataFlowCreateOrUpdateDataFlowResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdateDataFlowOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdateDataFlowOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a data flow. + * @param dataFlowName The data flow name. + * @param options The options parameters. + */ + getDataFlow( + dataFlowName: string, + options?: DataFlowGetDataFlowOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + dataFlowName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getDataFlowOperationSpec + ) as Promise; + } + + /** + * Deletes a data flow. + * @param dataFlowName The data flow name. + * @param options The options parameters. 
+ */ + async deleteDataFlow( + dataFlowName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + dataFlowName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deleteDataFlowOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deleteDataFlowOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Renames a dataflow. + * @param dataFlowName The data flow name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renameDataFlow( + dataFlowName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + dataFlowName, + request, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameDataFlowOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameDataFlowOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Lists data flows. + * @param options The options parameters. 
+ */ + private _getDataFlowsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getDataFlowsByWorkspaceOperationSpec + ) as Promise; + } + + /** + * GetDataFlowsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetDataFlowsByWorkspace + * method. + * @param options The options parameters. + */ + private _getDataFlowsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getDataFlowsByWorkspaceNextOperationSpec + ) as Promise; + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows/{dataFlowName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.DataFlowResource + }, + 201: { + bodyMapper: Mappers.DataFlowResource + }, + 202: { + bodyMapper: Mappers.DataFlowResource + }, + 204: { + bodyMapper: Mappers.DataFlowResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.dataFlow, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, 
Parameters.dataFlowName], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], + mediaType: "json", + serializer +}; +const getDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows/{dataFlowName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DataFlowResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.dataFlowName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows/{dataFlowName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.dataFlowName], + headerParameters: [Parameters.accept], + serializer +}; +const renameDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows/{dataFlowName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.dataFlowName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getDataFlowsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DataFlowListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getDataFlowsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + 
responses: { + 200: { + bodyMapper: Mappers.DataFlowListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts new file mode 100644 index 000000000000..f0e17e80dafd --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -0,0 +1,368 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + DataFlowDebugSessionInfo, + CreateDataFlowDebugSessionRequest, + DataFlowDebugSessionCreateDataFlowDebugSessionResponse, + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse, + DataFlowDebugPackage, + DataFlowDebugSessionAddDataFlowResponse, + DeleteDataFlowDebugSessionRequest, + DataFlowDebugCommandRequest, + DataFlowDebugSessionExecuteCommandResponse, + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a DataFlowDebugSession. + */ +export class DataFlowDebugSession { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class DataFlowDebugSession class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Query all active data flow debug sessions. + * @param options The options parameters. 
+ */ + public listQueryDataFlowDebugSessionsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.queryDataFlowDebugSessionsByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.queryDataFlowDebugSessionsByWorkspacePagingPage(options); + } + }; + } + + private async *queryDataFlowDebugSessionsByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._queryDataFlowDebugSessionsByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._queryDataFlowDebugSessionsByWorkspaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *queryDataFlowDebugSessionsByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage( + options + )) { + yield* page; + } + } + + /** + * Creates a data flow debug session. + * @param request Data flow debug session definition + * @param options The options parameters. 
+ */ + async createDataFlowDebugSession( + request: CreateDataFlowDebugSessionRequest, + options?: coreHttp.OperationOptions + ): Promise< + LROPoller + > { + const operationArguments: coreHttp.OperationArguments = { + request, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + DataFlowDebugSessionCreateDataFlowDebugSessionResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createDataFlowDebugSessionOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createDataFlowDebugSessionOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Query all active data flow debug sessions. + * @param options The options parameters. + */ + private _queryDataFlowDebugSessionsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse + > { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + queryDataFlowDebugSessionsByWorkspaceOperationSpec + ) as Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse + >; + } + + /** + * Add a data flow into debug session. + * @param request Data flow debug session definition with debug content. + * @param options The options parameters. 
+ */ + addDataFlow( + request: DataFlowDebugPackage, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + request, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + addDataFlowOperationSpec + ) as Promise; + } + + /** + * Deletes a data flow debug session. + * @param request Data flow debug session definition for deletion + * @param options The options parameters. + */ + deleteDataFlowDebugSession( + request: DeleteDataFlowDebugSessionRequest, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + request, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + deleteDataFlowDebugSessionOperationSpec + ) as Promise; + } + + /** + * Execute a data flow debug command. + * @param request Data flow debug command definition. + * @param options The options parameters. 
+ */ + async executeCommand( + request: DataFlowDebugCommandRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + request, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + DataFlowDebugSessionExecuteCommandResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + executeCommandOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: executeCommandOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * QueryDataFlowDebugSessionsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the + * QueryDataFlowDebugSessionsByWorkspace method. + * @param options The options parameters. + */ + private _queryDataFlowDebugSessionsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse + > { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + queryDataFlowDebugSessionsByWorkspaceNextOperationSpec + ) as Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse + >; + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation 
Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const createDataFlowDebugSessionOperationSpec: coreHttp.OperationSpec = { + path: "/createDataFlowDebugSession", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.CreateDataFlowDebugSessionResponse + }, + 201: { + bodyMapper: Mappers.CreateDataFlowDebugSessionResponse + }, + 202: { + bodyMapper: Mappers.CreateDataFlowDebugSessionResponse + }, + 204: { + bodyMapper: Mappers.CreateDataFlowDebugSessionResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request1, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const queryDataFlowDebugSessionsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/queryDataFlowDebugSessions", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.QueryDataFlowDebugSessionsResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const addDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/addDataFlowToDebugSession", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.AddDataFlowToDebugSessionResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request2, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const deleteDataFlowDebugSessionOperationSpec: coreHttp.OperationSpec = { + path: "/deleteDataFlowDebugSession", + httpMethod: "POST", + responses: { + 200: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request3, + queryParameters: 
[Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const executeCommandOperationSpec: coreHttp.OperationSpec = { + path: "/executeDataFlowDebugCommand", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.DataFlowDebugCommandResponse + }, + 201: { + bodyMapper: Mappers.DataFlowDebugCommandResponse + }, + 202: { + bodyMapper: Mappers.DataFlowDebugCommandResponse + }, + 204: { + bodyMapper: Mappers.DataFlowDebugCommandResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request4, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const queryDataFlowDebugSessionsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.QueryDataFlowDebugSessionsResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts new file mode 100644 index 000000000000..0cef4925e7c0 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -0,0 +1,370 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + DatasetResource, + DatasetGetDatasetsByWorkspaceResponse, + 
DatasetCreateOrUpdateDatasetOptionalParams, + DatasetCreateOrUpdateDatasetResponse, + DatasetGetDatasetOptionalParams, + DatasetGetDatasetResponse, + ArtifactRenameRequest, + DatasetGetDatasetsByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a Dataset. + */ +export class Dataset { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class Dataset class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists datasets. + * @param options The options parameters. + */ + public listDatasetsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getDatasetsByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getDatasetsByWorkspacePagingPage(options); + } + }; + } + + private async *getDatasetsByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getDatasetsByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getDatasetsByWorkspaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getDatasetsByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getDatasetsByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists datasets. + * @param options The options parameters. 
+ */ + private _getDatasetsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getDatasetsByWorkspaceOperationSpec + ) as Promise; + } + + /** + * Creates or updates a dataset. + * @param datasetName The dataset name. + * @param dataset Dataset resource definition. + * @param options The options parameters. + */ + async createOrUpdateDataset( + datasetName: string, + dataset: DatasetResource, + options?: DatasetCreateOrUpdateDatasetOptionalParams + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + datasetName, + dataset, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + DatasetCreateOrUpdateDatasetResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdateDatasetOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdateDatasetOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a dataset. + * @param datasetName The dataset name. + * @param options The options parameters. + */ + getDataset( + datasetName: string, + options?: DatasetGetDatasetOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + datasetName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getDatasetOperationSpec + ) as Promise; + } + + /** + * Deletes a dataset. + * @param datasetName The dataset name. + * @param options The options parameters. 
+ */ + async deleteDataset( + datasetName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + datasetName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deleteDatasetOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deleteDatasetOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Renames a dataset. + * @param datasetName The dataset name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renameDataset( + datasetName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + datasetName, + request, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameDatasetOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameDatasetOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetDatasetsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetDatasetsByWorkspace method. + * @param options The options parameters. 
+ */ + private _getDatasetsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getDatasetsByWorkspaceNextOperationSpec + ) as Promise; + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getDatasetsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/datasets", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DatasetListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { + path: "/datasets/{datasetName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.DatasetResource + }, + 201: { + bodyMapper: Mappers.DatasetResource + }, + 202: { + bodyMapper: Mappers.DatasetResource + }, + 204: { + bodyMapper: Mappers.DatasetResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.dataset, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.datasetName], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], + mediaType: "json", + serializer +}; +const 
getDatasetOperationSpec: coreHttp.OperationSpec = { + path: "/datasets/{datasetName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DatasetResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.datasetName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteDatasetOperationSpec: coreHttp.OperationSpec = { + path: "/datasets/{datasetName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.datasetName], + headerParameters: [Parameters.accept], + serializer +}; +const renameDatasetOperationSpec: coreHttp.OperationSpec = { + path: "/datasets/{datasetName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.datasetName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getDatasetsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DatasetListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/index.ts b/sdk/synapse/synapse-artifacts/src/operations/index.ts new file mode 100644 index 000000000000..3463c5a1b9bc --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/index.ts @@ -0,0 +1,16 @@ 
+export * from "./linkedService"; +export * from "./dataset"; +export * from "./pipeline"; +export * from "./pipelineRun"; +export * from "./trigger"; +export * from "./triggerRun"; +export * from "./dataFlow"; +export * from "./dataFlowDebugSession"; +export * from "./sqlScript"; +export * from "./sparkJobDefinition"; +export * from "./notebook"; +export * from "./workspace"; +export * from "./sqlPools"; +export * from "./bigDataPools"; +export * from "./integrationRuntimes"; +export * from "./workspaceGitRepoManagement"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts new file mode 100644 index 000000000000..4736ccb3746a --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -0,0 +1,94 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { + IntegrationRuntimesListResponse, + IntegrationRuntimesGetResponse +} from "../models"; + +/** + * Class representing a IntegrationRuntimes. + */ +export class IntegrationRuntimes { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class IntegrationRuntimes class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * List Integration Runtimes + * @param options The options parameters. 
+ */ + list( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; + } + + /** + * Get Integration Runtime + * @param integrationRuntimeName The Integration Runtime name + * @param options The options parameters. + */ + get( + integrationRuntimeName: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + integrationRuntimeName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const listOperationSpec: coreHttp.OperationSpec = { + path: "/integrationRuntimes", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.IntegrationRuntimeListResponse + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getOperationSpec: coreHttp.OperationSpec = { + path: "/integrationRuntimes/{integrationRuntimeName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.IntegrationRuntimeResource + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.integrationRuntimeName], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts new file mode 100644 index 000000000000..35436c92ba04 --- /dev/null +++ 
b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -0,0 +1,373 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + LinkedServiceResource, + LinkedServiceGetLinkedServicesByWorkspaceResponse, + LinkedServiceCreateOrUpdateLinkedServiceOptionalParams, + LinkedServiceCreateOrUpdateLinkedServiceResponse, + LinkedServiceGetLinkedServiceOptionalParams, + LinkedServiceGetLinkedServiceResponse, + ArtifactRenameRequest, + LinkedServiceGetLinkedServicesByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a LinkedService. + */ +export class LinkedService { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class LinkedService class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists linked services. + * @param options The options parameters. 
+ */ + public listLinkedServicesByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getLinkedServicesByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getLinkedServicesByWorkspacePagingPage(options); + } + }; + } + + private async *getLinkedServicesByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getLinkedServicesByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getLinkedServicesByWorkspaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getLinkedServicesByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getLinkedServicesByWorkspacePagingPage( + options + )) { + yield* page; + } + } + + /** + * Lists linked services. + * @param options The options parameters. + */ + private _getLinkedServicesByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getLinkedServicesByWorkspaceOperationSpec + ) as Promise; + } + + /** + * Creates or updates a linked service. + * @param linkedServiceName The linked service name. + * @param linkedService Linked service resource definition. + * @param options The options parameters. 
+ */ + async createOrUpdateLinkedService( + linkedServiceName: string, + linkedService: LinkedServiceResource, + options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + linkedServiceName, + linkedService, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + LinkedServiceCreateOrUpdateLinkedServiceResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdateLinkedServiceOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdateLinkedServiceOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a linked service. + * @param linkedServiceName The linked service name. + * @param options The options parameters. + */ + getLinkedService( + linkedServiceName: string, + options?: LinkedServiceGetLinkedServiceOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + linkedServiceName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getLinkedServiceOperationSpec + ) as Promise; + } + + /** + * Deletes a linked service. + * @param linkedServiceName The linked service name. + * @param options The options parameters. 
+ */ + async deleteLinkedService( + linkedServiceName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + linkedServiceName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deleteLinkedServiceOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deleteLinkedServiceOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Renames a linked service. + * @param linkedServiceName The linked service name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renameLinkedService( + linkedServiceName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + linkedServiceName, + request, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameLinkedServiceOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameLinkedServiceOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetLinkedServicesByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetLinkedServicesByWorkspace + * method. + * @param options The options parameters. 
+ */ + private _getLinkedServicesByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getLinkedServicesByWorkspaceNextOperationSpec + ) as Promise; + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getLinkedServicesByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.LinkedServiceListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices/{linkedServiceName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.LinkedServiceResource + }, + 201: { + bodyMapper: Mappers.LinkedServiceResource + }, + 202: { + bodyMapper: Mappers.LinkedServiceResource + }, + 204: { + bodyMapper: Mappers.LinkedServiceResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.linkedService, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + 
Parameters.ifMatch + ], + mediaType: "json", + serializer +}; +const getLinkedServiceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices/{linkedServiceName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.LinkedServiceResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteLinkedServiceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices/{linkedServiceName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], + headerParameters: [Parameters.accept], + serializer +}; +const renameLinkedServiceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices/{linkedServiceName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getLinkedServicesByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.LinkedServiceListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts 
b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts new file mode 100644 index 000000000000..df003d827a91 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -0,0 +1,488 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + NotebookResource, + NotebookGetNotebooksByWorkspaceResponse, + NotebookGetNotebookSummaryByWorkSpaceResponse, + NotebookCreateOrUpdateNotebookOptionalParams, + NotebookCreateOrUpdateNotebookResponse, + NotebookGetNotebookOptionalParams, + NotebookGetNotebookResponse, + ArtifactRenameRequest, + NotebookGetNotebooksByWorkspaceNextResponse, + NotebookGetNotebookSummaryByWorkSpaceNextResponse +} from "../models"; + +/** + * Class representing a Notebook. + */ +export class Notebook { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class Notebook class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists Notebooks. + * @param options The options parameters. 
+ */ + public listNotebooksByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getNotebooksByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getNotebooksByWorkspacePagingPage(options); + } + }; + } + + private async *getNotebooksByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getNotebooksByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getNotebooksByWorkspaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getNotebooksByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getNotebooksByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists a summary of Notebooks. + * @param options The options parameters. 
+ */ + public listNotebookSummaryByWorkSpace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getNotebookSummaryByWorkSpacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getNotebookSummaryByWorkSpacePagingPage(options); + } + }; + } + + private async *getNotebookSummaryByWorkSpacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getNotebookSummaryByWorkSpace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getNotebookSummaryByWorkSpaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getNotebookSummaryByWorkSpacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( + options + )) { + yield* page; + } + } + + /** + * Lists Notebooks. + * @param options The options parameters. + */ + private _getNotebooksByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getNotebooksByWorkspaceOperationSpec + ) as Promise; + } + + /** + * Lists a summary of Notebooks. + * @param options The options parameters. 
+ */ + private _getNotebookSummaryByWorkSpace( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getNotebookSummaryByWorkSpaceOperationSpec + ) as Promise; + } + + /** + * Creates or updates a Note Book. + * @param notebookName The notebook name. + * @param notebook Note book resource definition. + * @param options The options parameters. + */ + async createOrUpdateNotebook( + notebookName: string, + notebook: NotebookResource, + options?: NotebookCreateOrUpdateNotebookOptionalParams + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + notebookName, + notebook, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + NotebookCreateOrUpdateNotebookResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdateNotebookOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdateNotebookOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a Note Book. + * @param notebookName The notebook name. + * @param options The options parameters. + */ + getNotebook( + notebookName: string, + options?: NotebookGetNotebookOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + notebookName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getNotebookOperationSpec + ) as Promise; + } + + /** + * Deletes a Note book. + * @param notebookName The notebook name. + * @param options The options parameters. 
+ */ + async deleteNotebook( + notebookName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + notebookName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deleteNotebookOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deleteNotebookOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Renames a notebook. + * @param notebookName The notebook name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renameNotebook( + notebookName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + notebookName, + request, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameNotebookOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameNotebookOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetNotebooksByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetNotebooksByWorkspace + * method. + * @param options The options parameters. 
+ */ + private _getNotebooksByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getNotebooksByWorkspaceNextOperationSpec + ) as Promise; + } + + /** + * GetNotebookSummaryByWorkSpaceNext + * @param nextLink The nextLink from the previous successful call to the GetNotebookSummaryByWorkSpace + * method. + * @param options The options parameters. + */ + private _getNotebookSummaryByWorkSpaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getNotebookSummaryByWorkSpaceNextOperationSpec + ) as Promise; + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getNotebooksByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getNotebookSummaryByWorkSpaceOperationSpec: coreHttp.OperationSpec = { + path: 
"/notebooks/summary", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateNotebookOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks/{notebookName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.NotebookResource + }, + 201: { + bodyMapper: Mappers.NotebookResource + }, + 202: { + bodyMapper: Mappers.NotebookResource + }, + 204: { + bodyMapper: Mappers.NotebookResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.notebook, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.notebookName], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], + mediaType: "json", + serializer +}; +const getNotebookOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks/{notebookName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.notebookName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteNotebookOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks/{notebookName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.notebookName], + headerParameters: [Parameters.accept], + serializer +}; +const renameNotebookOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks/{notebookName}/rename", + httpMethod: "POST", + responses: { + 200: 
{}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.notebookName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getNotebooksByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; +const getNotebookSummaryByWorkSpaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts new file mode 100644 index 000000000000..87c49bd6c9ae --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -0,0 +1,415 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + PipelineResource, + PipelineGetPipelinesByWorkspaceResponse, + PipelineCreateOrUpdatePipelineOptionalParams, + PipelineCreateOrUpdatePipelineResponse, + PipelineGetPipelineOptionalParams, + PipelineGetPipelineResponse, 
+ ArtifactRenameRequest, + PipelineCreatePipelineRunOptionalParams, + PipelineCreatePipelineRunResponse, + PipelineGetPipelinesByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a Pipeline. + */ +export class Pipeline { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class Pipeline class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists pipelines. + * @param options The options parameters. + */ + public listPipelinesByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getPipelinesByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getPipelinesByWorkspacePagingPage(options); + } + }; + } + + private async *getPipelinesByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getPipelinesByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getPipelinesByWorkspaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getPipelinesByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getPipelinesByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists pipelines. + * @param options The options parameters. 
+ */ + private _getPipelinesByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getPipelinesByWorkspaceOperationSpec + ) as Promise; + } + + /** + * Creates or updates a pipeline. + * @param pipelineName The pipeline name. + * @param pipeline Pipeline resource definition. + * @param options The options parameters. + */ + async createOrUpdatePipeline( + pipelineName: string, + pipeline: PipelineResource, + options?: PipelineCreateOrUpdatePipelineOptionalParams + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + pipeline, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + PipelineCreateOrUpdatePipelineResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdatePipelineOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdatePipelineOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a pipeline. + * @param pipelineName The pipeline name. + * @param options The options parameters. + */ + getPipeline( + pipelineName: string, + options?: PipelineGetPipelineOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getPipelineOperationSpec + ) as Promise; + } + + /** + * Deletes a pipeline. + * @param pipelineName The pipeline name. + * @param options The options parameters. 
+ */ + async deletePipeline( + pipelineName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deletePipelineOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deletePipelineOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Renames a pipeline. + * @param pipelineName The pipeline name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renamePipeline( + pipelineName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + request, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renamePipelineOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renamePipelineOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Creates a run of a pipeline. + * @param pipelineName The pipeline name. + * @param options The options parameters. 
+ */ + createPipelineRun( + pipelineName: string, + options?: PipelineCreatePipelineRunOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + createPipelineRunOperationSpec + ) as Promise; + } + + /** + * GetPipelinesByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetPipelinesByWorkspace + * method. + * @param options The options parameters. + */ + private _getPipelinesByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getPipelinesByWorkspaceNextOperationSpec + ) as Promise; + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getPipelinesByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PipelineListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}", + 
httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.PipelineResource + }, + 201: { + bodyMapper: Mappers.PipelineResource + }, + 202: { + bodyMapper: Mappers.PipelineResource + }, + 204: { + bodyMapper: Mappers.PipelineResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.pipeline, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.pipelineName], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], + mediaType: "json", + serializer +}; +const getPipelineOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PipelineResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.pipelineName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deletePipelineOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.pipelineName], + headerParameters: [Parameters.accept], + serializer +}; +const renamePipelineOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.pipelineName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const createPipelineRunOperationSpec: coreHttp.OperationSpec = { + path: 
"/pipelines/{pipelineName}/createRun", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: Mappers.CreateRunResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.parameters, + queryParameters: [ + Parameters.apiVersion, + Parameters.referencePipelineRunId, + Parameters.isRecovery, + Parameters.startActivityName + ], + urlParameters: [Parameters.endpoint, Parameters.pipelineName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getPipelinesByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PipelineListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts new file mode 100644 index 000000000000..9c2c3370e5cc --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -0,0 +1,182 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { + RunFilterParameters, + PipelineRunQueryPipelineRunsByWorkspaceResponse, + PipelineRunGetPipelineRunResponse, + PipelineRunQueryActivityRunsResponse, + PipelineRunCancelPipelineRunOptionalParams +} from "../models"; + +/** + * Class representing a PipelineRun. + */ +export class PipelineRun { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class PipelineRun class. 
+ * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Query pipeline runs in the workspace based on input filter conditions. + * @param filterParameters Parameters to filter the pipeline run. + * @param options The options parameters. + */ + queryPipelineRunsByWorkspace( + filterParameters: RunFilterParameters, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + filterParameters, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + queryPipelineRunsByWorkspaceOperationSpec + ) as Promise; + } + + /** + * Get a pipeline run by its run ID. + * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + getPipelineRun( + runId: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + runId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getPipelineRunOperationSpec + ) as Promise; + } + + /** + * Query activity runs based on input filter conditions. + * @param pipelineName The pipeline name. + * @param runId The pipeline run identifier. + * @param filterParameters Parameters to filter the activity runs. + * @param options The options parameters. + */ + queryActivityRuns( + pipelineName: string, + runId: string, + filterParameters: RunFilterParameters, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + runId, + filterParameters, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + queryActivityRunsOperationSpec + ) as Promise; + } + + /** + * Cancel a pipeline run by its run ID. 
+ * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + cancelPipelineRun( + runId: string, + options?: PipelineRunCancelPipelineRunOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + runId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + cancelPipelineRunOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const queryPipelineRunsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/queryPipelineRuns", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.PipelineRunsQueryResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.filterParameters, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getPipelineRunOperationSpec: coreHttp.OperationSpec = { + path: "/pipelineruns/{runId}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PipelineRun + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.runId], + headerParameters: [Parameters.accept], + serializer +}; +const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}/pipelineruns/{runId}/queryActivityruns", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.ActivityRunsQueryResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.filterParameters, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.pipelineName, + Parameters.runId + ], + headerParameters: [Parameters.accept, 
Parameters.contentType], + mediaType: "json", + serializer +}; +const cancelPipelineRunOperationSpec: coreHttp.OperationSpec = { + path: "/pipelineruns/{runId}/cancel", + httpMethod: "POST", + responses: { + 200: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion, Parameters.isRecursive], + urlParameters: [Parameters.endpoint, Parameters.runId], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts new file mode 100644 index 000000000000..0604bdeeaf1b --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -0,0 +1,458 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + SparkJobDefinitionResource, + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionOptionalParams, + SparkJobDefinitionGetSparkJobDefinitionResponse, + SparkJobDefinitionExecuteSparkJobDefinitionResponse, + ArtifactRenameRequest, + SparkJobDefinitionDebugSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a SparkJobDefinition. + */ +export class SparkJobDefinition { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class SparkJobDefinition class. 
+ * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists spark job definitions. + * @param options The options parameters. + */ + public listSparkJobDefinitionsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getSparkJobDefinitionsByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getSparkJobDefinitionsByWorkspacePagingPage(options); + } + }; + } + + private async *getSparkJobDefinitionsByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getSparkJobDefinitionsByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getSparkJobDefinitionsByWorkspaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getSparkJobDefinitionsByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( + options + )) { + yield* page; + } + } + + /** + * Lists spark job definitions. + * @param options The options parameters. + */ + private _getSparkJobDefinitionsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkJobDefinitionsByWorkspaceOperationSpec + ) as Promise; + } + + /** + * Creates or updates a Spark Job Definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param sparkJobDefinition Spark Job Definition resource definition. 
+ * @param options The options parameters. + */ + createOrUpdateSparkJobDefinition( + sparkJobDefinitionName: string, + sparkJobDefinition: SparkJobDefinitionResource, + options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + sparkJobDefinition, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + createOrUpdateSparkJobDefinitionOperationSpec + ) as Promise; + } + + /** + * Gets a Spark Job Definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param options The options parameters. + */ + getSparkJobDefinition( + sparkJobDefinitionName: string, + options?: SparkJobDefinitionGetSparkJobDefinitionOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkJobDefinitionOperationSpec + ) as Promise; + } + + /** + * Deletes a Spark Job Definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param options The options parameters. + */ + deleteSparkJobDefinition( + sparkJobDefinitionName: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + deleteSparkJobDefinitionOperationSpec + ) as Promise; + } + + /** + * Executes the spark job definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param options The options parameters. 
+ */ + async executeSparkJobDefinition( + sparkJobDefinitionName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + options: this.getOperationOptions(options, "location") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + SparkJobDefinitionExecuteSparkJobDefinitionResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + executeSparkJobDefinitionOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: executeSparkJobDefinitionOperationSpec, + initialOperationResult, + sendOperation, + finalStateVia: "location" + }); + } + + /** + * Renames a sparkJobDefinition. + * @param sparkJobDefinitionName The spark job definition name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renameSparkJobDefinition( + sparkJobDefinitionName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + request, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameSparkJobDefinitionOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameSparkJobDefinitionOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Debug the spark job definition. 
+ * @param sparkJobDefinitionAzureResource Spark Job Definition resource definition. + * @param options The options parameters. + */ + async debugSparkJobDefinition( + sparkJobDefinitionAzureResource: SparkJobDefinitionResource, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionAzureResource, + options: this.getOperationOptions(options, "location") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + SparkJobDefinitionDebugSparkJobDefinitionResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + debugSparkJobDefinitionOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: debugSparkJobDefinitionOperationSpec, + initialOperationResult, + sendOperation, + finalStateVia: "location" + }); + } + + /** + * GetSparkJobDefinitionsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the + * GetSparkJobDefinitionsByWorkspace method. + * @param options The options parameters. 
+ */ + private _getSparkJobDefinitionsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkJobDefinitionsByWorkspaceNextOperationSpec + ) as Promise< + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse + >; + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getSparkJobDefinitionsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkJobDefinitionsListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.SparkJobDefinitionResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.sparkJobDefinition, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], + mediaType: "json", + 
serializer +}; +const getSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkJobDefinitionResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], + headerParameters: [Parameters.accept], + serializer +}; +const executeSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}/execute", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.SparkBatchJob + }, + 201: { + bodyMapper: Mappers.SparkBatchJob + }, + 202: { + bodyMapper: Mappers.SparkBatchJob + }, + 204: { + bodyMapper: Mappers.SparkBatchJob + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], + headerParameters: [Parameters.accept], + serializer +}; +const renameSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], + headerParameters: 
[Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const debugSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/debugSparkJobDefinition", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.SparkBatchJob + }, + 201: { + bodyMapper: Mappers.SparkBatchJob + }, + 202: { + bodyMapper: Mappers.SparkBatchJob + }, + 204: { + bodyMapper: Mappers.SparkBatchJob + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.sparkJobDefinitionAzureResource, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getSparkJobDefinitionsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkJobDefinitionsListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts new file mode 100644 index 000000000000..dc58292e2873 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -0,0 +1,89 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { SqlPoolsListResponse, SqlPoolsGetResponse } from "../models"; + +/** + * Class representing a SqlPools. + */ +export class SqlPools { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class SqlPools class. 
+ * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * List Sql Pools + * @param options The options parameters. + */ + list(options?: coreHttp.OperationOptions): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; + } + + /** + * Get Sql Pool + * @param sqlPoolName The Sql Pool name + * @param options The options parameters. + */ + get( + sqlPoolName: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sqlPoolName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const listOperationSpec: coreHttp.OperationSpec = { + path: "/sqlPools", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlPoolInfoListResult + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getOperationSpec: coreHttp.OperationSpec = { + path: "/sqlPools/{sqlPoolName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlPool + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sqlPoolName], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts new file mode 100644 index 
000000000000..d3e08c1f3ea1 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -0,0 +1,330 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + SqlScriptResource, + SqlScriptGetSqlScriptsByWorkspaceResponse, + SqlScriptCreateOrUpdateSqlScriptOptionalParams, + SqlScriptCreateOrUpdateSqlScriptResponse, + SqlScriptGetSqlScriptOptionalParams, + SqlScriptGetSqlScriptResponse, + ArtifactRenameRequest, + SqlScriptGetSqlScriptsByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a SqlScript. + */ +export class SqlScript { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class SqlScript class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists sql scripts. + * @param options The options parameters. 
+ */ + public listSqlScriptsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getSqlScriptsByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getSqlScriptsByWorkspacePagingPage(options); + } + }; + } + + private async *getSqlScriptsByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getSqlScriptsByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getSqlScriptsByWorkspaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getSqlScriptsByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getSqlScriptsByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists sql scripts. + * @param options The options parameters. + */ + private _getSqlScriptsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSqlScriptsByWorkspaceOperationSpec + ) as Promise; + } + + /** + * Creates or updates a Sql Script. + * @param sqlScriptName The sql script name. + * @param sqlScript Sql Script resource definition. + * @param options The options parameters. 
+ */ + createOrUpdateSqlScript( + sqlScriptName: string, + sqlScript: SqlScriptResource, + options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sqlScriptName, + sqlScript, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + createOrUpdateSqlScriptOperationSpec + ) as Promise; + } + + /** + * Gets a sql script. + * @param sqlScriptName The sql script name. + * @param options The options parameters. + */ + getSqlScript( + sqlScriptName: string, + options?: SqlScriptGetSqlScriptOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sqlScriptName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSqlScriptOperationSpec + ) as Promise; + } + + /** + * Deletes a Sql Script. + * @param sqlScriptName The sql script name. + * @param options The options parameters. + */ + deleteSqlScript( + sqlScriptName: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sqlScriptName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + deleteSqlScriptOperationSpec + ) as Promise; + } + + /** + * Renames a sqlScript. + * @param sqlScriptName The sql script name. + * @param request proposed new name. + * @param options The options parameters. 
+ */ + async renameSqlScript( + sqlScriptName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + sqlScriptName, + request, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameSqlScriptOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameSqlScriptOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetSqlScriptsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetSqlScriptsByWorkspace + * method. + * @param options The options parameters. + */ + private _getSqlScriptsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSqlScriptsByWorkspaceNextOperationSpec + ) as Promise; + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getSqlScriptsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts", 
+ httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlScriptsListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateSqlScriptOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts/{sqlScriptName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.SqlScriptResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.sqlScript, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], + mediaType: "json", + serializer +}; +const getSqlScriptOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts/{sqlScriptName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlScriptResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteSqlScriptOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts/{sqlScriptName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], + headerParameters: [Parameters.accept], + serializer +}; +const renameSqlScriptOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts/{sqlScriptName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + 
urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getSqlScriptsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlScriptsListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts new file mode 100644 index 000000000000..4f333b059259 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -0,0 +1,571 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + TriggerResource, + TriggerGetTriggersByWorkspaceResponse, + TriggerCreateOrUpdateTriggerOptionalParams, + TriggerCreateOrUpdateTriggerResponse, + TriggerGetTriggerOptionalParams, + TriggerGetTriggerResponse, + TriggerSubscribeTriggerToEventsResponse, + TriggerGetEventSubscriptionStatusResponse, + TriggerUnsubscribeTriggerFromEventsResponse, + TriggerGetTriggersByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a Trigger. + */ +export class Trigger { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class Trigger class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists triggers. + * @param options The options parameters. 
+ */ + public listTriggersByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getTriggersByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getTriggersByWorkspacePagingPage(options); + } + }; + } + + private async *getTriggersByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getTriggersByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getTriggersByWorkspaceNext( + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getTriggersByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getTriggersByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists triggers. + * @param options The options parameters. + */ + private _getTriggersByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getTriggersByWorkspaceOperationSpec + ) as Promise; + } + + /** + * Creates or updates a trigger. + * @param triggerName The trigger name. + * @param trigger Trigger resource definition. + * @param options The options parameters. 
+ */ + async createOrUpdateTrigger( + triggerName: string, + trigger: TriggerResource, + options?: TriggerCreateOrUpdateTriggerOptionalParams + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + trigger, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + TriggerCreateOrUpdateTriggerResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdateTriggerOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdateTriggerOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + getTrigger( + triggerName: string, + options?: TriggerGetTriggerOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getTriggerOperationSpec + ) as Promise; + } + + /** + * Deletes a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. 
+ */ + async deleteTrigger( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deleteTriggerOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deleteTriggerOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Subscribe event trigger to events. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + async subscribeTriggerToEvents( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + TriggerSubscribeTriggerToEventsResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + subscribeTriggerToEventsOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: subscribeTriggerToEventsOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Get a trigger's event subscription status. + * @param triggerName The trigger name. + * @param options The options parameters. 
+ */ + getEventSubscriptionStatus( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getEventSubscriptionStatusOperationSpec + ) as Promise; + } + + /** + * Unsubscribe event trigger from events. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + async unsubscribeTriggerFromEvents( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + TriggerUnsubscribeTriggerFromEventsResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + unsubscribeTriggerFromEventsOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: unsubscribeTriggerFromEventsOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Starts a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. 
+ */ + async startTrigger( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + startTriggerOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: startTriggerOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Stops a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + async stopTrigger( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: this.getOperationOptions(options, "undefined") + }; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; + }; + + const initialOperationResult = await sendOperation( + operationArguments, + stopTriggerOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: stopTriggerOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetTriggersByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetTriggersByWorkspace method. + * @param options The options parameters. 
+ */ + private _getTriggersByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getTriggersByWorkspaceNextOperationSpec + ) as Promise; + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getTriggersByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/triggers", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.TriggerListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.TriggerResource + }, + 201: { + bodyMapper: Mappers.TriggerResource + }, + 202: { + bodyMapper: Mappers.TriggerResource + }, + 204: { + bodyMapper: Mappers.TriggerResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.trigger, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], + mediaType: "json", + serializer +}; +const 
getTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.TriggerResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const subscribeTriggerToEventsOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/subscribeToEvents", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 201: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 202: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 204: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const getEventSubscriptionStatusOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/getEventSubscriptionStatus", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const 
unsubscribeTriggerFromEventsOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/unsubscribeFromEvents", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 201: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 202: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 204: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const startTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/start", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const stopTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/stop", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const getTriggersByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.TriggerListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts 
b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts new file mode 100644 index 000000000000..20e37cfac269 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -0,0 +1,144 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { + RunFilterParameters, + TriggerRunQueryTriggerRunsByWorkspaceResponse +} from "../models"; + +/** + * Class representing a TriggerRun. + */ +export class TriggerRun { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class TriggerRun class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Rerun single trigger instance by runId. + * @param triggerName The trigger name. + * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + rerunTriggerInstance( + triggerName: string, + runId: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + runId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + rerunTriggerInstanceOperationSpec + ) as Promise; + } + + /** + * Cancel single trigger instance by runId. + * @param triggerName The trigger name. + * @param runId The pipeline run identifier. + * @param options The options parameters. 
+ */ + cancelTriggerInstance( + triggerName: string, + runId: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + triggerName, + runId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + cancelTriggerInstanceOperationSpec + ) as Promise; + } + + /** + * Query trigger runs. + * @param filterParameters Parameters to filter the pipeline run. + * @param options The options parameters. + */ + queryTriggerRunsByWorkspace( + filterParameters: RunFilterParameters, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + filterParameters, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + queryTriggerRunsByWorkspaceOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/triggerRuns/{runId}/rerun", + httpMethod: "POST", + responses: { + 200: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.runId, + Parameters.triggerName + ], + headerParameters: [Parameters.accept], + serializer +}; +const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/triggerRuns/{runId}/cancel", + httpMethod: "POST", + responses: { + 200: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.runId, + Parameters.triggerName + ], + headerParameters: [Parameters.accept], + serializer +}; +const queryTriggerRunsByWorkspaceOperationSpec: coreHttp.OperationSpec 
= { + path: "/queryTriggerRuns", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.TriggerRunsQueryResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.filterParameters, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts new file mode 100644 index 000000000000..4c4453399fb4 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -0,0 +1,54 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { WorkspaceGetResponse } from "../models"; + +/** + * Class representing a Workspace. + */ +export class Workspace { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class Workspace class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Get Workspace + * @param options The options parameters. 
+ */ + get(options?: coreHttp.OperationOptions): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getOperationSpec: coreHttp.OperationSpec = { + path: "/workspace", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.Workspace + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts new file mode 100644 index 000000000000..058888c80532 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -0,0 +1,66 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { + GitHubAccessTokenRequest, + WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams, + WorkspaceGitRepoManagementGetGitHubAccessTokenResponse +} from "../models"; + +/** + * Class representing a WorkspaceGitRepoManagement. + */ +export class WorkspaceGitRepoManagement { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class WorkspaceGitRepoManagement class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Get the GitHub access token. + * @param gitHubAccessTokenRequest + * @param options The options parameters. 
+ */ + getGitHubAccessToken( + gitHubAccessTokenRequest: GitHubAccessTokenRequest, + options?: WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + gitHubAccessTokenRequest, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getGitHubAccessTokenOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { + path: "/getGitHubAccessToken", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.GitHubAccessTokenResponse + } + }, + requestBody: Parameters.gitHubAccessTokenRequest, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.clientRequestId + ], + mediaType: "json", + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/tsconfig.json b/sdk/synapse/synapse-artifacts/tsconfig.json new file mode 100644 index 000000000000..0290d6707a44 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "es6", + "moduleResolution": "node", + "strict": true, + "target": "es5", + "sourceMap": true, + "declarationMap": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "preserveConstEnums": true, + "lib": ["es6", "dom"], + "declaration": true, + "outDir": "./esm", + "importHelpers": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/sdk/synapse/synapse-managed-endpoints/README.md b/sdk/synapse/synapse-managed-endpoints/README.md new file mode 100644 index 000000000000..d4c47ecde213 --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/README.md @@ -0,0 +1,27 @@ 
+## Azure ManagedPrivateEndpointsClient SDK for JavaScript + +This package contains an isomorphic SDK for ManagedPrivateEndpointsClient. + +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +### How to Install + +```bash +npm install @azure/synapse-managed-endpoints +``` + +### How to use + +#### Sample code + +Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. + +## Related projects + +- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) + + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file diff --git a/sdk/synapse/synapse-managed-endpoints/api-extractor.json b/sdk/synapse/synapse-managed-endpoints/api-extractor.json new file mode 100644 index 000000000000..5fb5b21b56f2 --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/api-extractor.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "mainEntryPointFilePath": "./esm/index.d.ts", + "docModel": { "enabled": true }, + "apiReport": { "enabled": true, "reportFolder": "./review" }, + "dtsRollup": { + "enabled": true, + "untrimmedFilePath": "", + "publicTrimmedFilePath": "./esm/index.d.ts" + }, + "messages": { + "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "extractorMessageReporting": { + "ae-missing-release-tag": { "logLevel": "none" }, + "ae-unresolved-link": { "logLevel": "none" } + } + } +} diff --git a/sdk/synapse/synapse-managed-endpoints/package.json b/sdk/synapse/synapse-managed-endpoints/package.json new file mode 100644 index 000000000000..7e06a686ad0f --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/package.json @@ -0,0 +1,52 @@ +{ + "name": "@azure/synapse-managed-endpoints", + "author": "Microsoft Corporation", + 
"description": "A generated SDK for ManagedPrivateEndpointsClient.", + "version": "1.0.0", + "dependencies": { + "@azure/core-paging": "^1.1.1", + "@azure/core-http": "^1.2.0", + "tslib": "^2.0.0" + }, + "keywords": ["node", "azure", "typescript", "browser", "isomorphic"], + "license": "MIT", + "main": "./dist/synapse-managed-endpoints.js", + "module": "./esm/index.js", + "types": "./esm/index.d.ts", + "devDependencies": { + "typescript": "~3.9.3", + "rollup": "^1.16.3", + "rollup-plugin-node-resolve": "^3.4.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "uglify-js": "^3.4.9", + "@microsoft/api-extractor": "7.7.11" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js", + "repository": { + "type": "git", + "url": "https://github.com/Azure/azure-sdk-for-js.git" + }, + "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "esm/**/*.js", + "esm/**/*.js.map", + "esm/**/*.d.ts", + "esm/**/*.d.ts.map", + "src/**/*.ts", + "README.md", + "rollup.config.js", + "tsconfig.json" + ], + "scripts": { + "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-endpoints.js.map'\" -o ./dist/synapse-managed-endpoints.min.js ./dist/synapse-managed-endpoints.js", + "prepack": "npm install && npm run build", + "extract-api": "api-extractor run --local" + }, + "sideEffects": false, + "autoPublish": true +} diff --git a/sdk/synapse/synapse-managed-endpoints/review/synapse-managed-endpoints.api.md b/sdk/synapse/synapse-managed-endpoints/review/synapse-managed-endpoints.api.md new file mode 100644 index 000000000000..279522fa6880 --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/review/synapse-managed-endpoints.api.md @@ -0,0 +1,99 @@ +## API Report File for "@azure/synapse-managed-endpoints" + +> Do not edit this file. 
It is a report generated by [API Extractor](https://api-extractor.com/). + +```ts + +import * as coreHttp from '@azure/core-http'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; + +// @public +export interface ManagedPrivateEndpoint { + readonly id?: string; + readonly name?: string; + properties?: ManagedPrivateEndpointProperties; + readonly type?: string; +} + +// @public +export interface ManagedPrivateEndpointConnectionState { + actionsRequired?: string; + description?: string; + readonly status?: string; +} + +// @public +export interface ManagedPrivateEndpointListResponse { + readonly nextLink?: string; + value?: ManagedPrivateEndpoint[]; +} + +// @public +export interface ManagedPrivateEndpointProperties { + connectionState?: ManagedPrivateEndpointConnectionState; + groupId?: string; + readonly isReserved?: boolean; + privateLinkResourceId?: string; + readonly provisioningState?: string; +} + +// @public (undocumented) +export class ManagedPrivateEndpointsClient extends ManagedPrivateEndpointsClientContext { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ManagedPrivateEndpointsClientOptionalParams); + // Warning: (ae-forgotten-export) The symbol "ManagedPrivateEndpoints" needs to be exported by the entry point index.d.ts + // + // (undocumented) + managedPrivateEndpoints: ManagedPrivateEndpoints; +} + +// @public (undocumented) +export class ManagedPrivateEndpointsClientContext extends coreHttp.ServiceClient { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ManagedPrivateEndpointsClientOptionalParams); + // (undocumented) + apiVersion: string; + // (undocumented) + endpoint: string; +} + +// @public +export interface ManagedPrivateEndpointsClientOptionalParams extends coreHttp.ServiceClientOptions { + apiVersion?: string; + endpoint?: string; +} + +// @public +export type 
ManagedPrivateEndpointsCreateResponse = ManagedPrivateEndpoint & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: ManagedPrivateEndpoint; + }; +}; + +// @public +export type ManagedPrivateEndpointsGetResponse = ManagedPrivateEndpoint & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: ManagedPrivateEndpoint; + }; +}; + +// @public +export type ManagedPrivateEndpointsListNextResponse = ManagedPrivateEndpointListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: ManagedPrivateEndpointListResponse; + }; +}; + +// @public +export type ManagedPrivateEndpointsListResponse = ManagedPrivateEndpointListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: ManagedPrivateEndpointListResponse; + }; +}; + + +// (No @packageDocumentation comment for this package) + +``` diff --git a/sdk/synapse/synapse-managed-endpoints/rollup.config.js b/sdk/synapse/synapse-managed-endpoints/rollup.config.js new file mode 100644 index 000000000000..bd02ade8368c --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/rollup.config.js @@ -0,0 +1,31 @@ +import rollup from "rollup"; +import nodeResolve from "rollup-plugin-node-resolve"; +import sourcemaps from "rollup-plugin-sourcemaps"; + +/** + * @type {rollup.RollupFileOptions} + */ +const config = { + input: "./esm/managedPrivateEndpointsClient.js", + external: ["@azure/core-http", "@azure/core-arm"], + output: { + file: "./dist/synapse-managed-endpoints.js", + format: "umd", + name: "Azure.SynapseManagedEndpoints", + sourcemap: true, + globals: { + "@azure/core-http": "coreHttp", + "@azure/core-arm": "coreArm" + }, + banner: `/* + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ ` + }, + plugins: [nodeResolve({ module: true }), sourcemaps()] +}; + +export default config; diff --git a/sdk/synapse/synapse-managed-endpoints/src/index.ts b/sdk/synapse/synapse-managed-endpoints/src/index.ts new file mode 100644 index 000000000000..1aaf83a23b90 --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/src/index.ts @@ -0,0 +1,4 @@ +/// +export * from "./models"; +export { ManagedPrivateEndpointsClient } from "./managedPrivateEndpointsClient"; +export { ManagedPrivateEndpointsClientContext } from "./managedPrivateEndpointsClientContext"; diff --git a/sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClient.ts b/sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClient.ts new file mode 100644 index 000000000000..1df69973a23f --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClient.ts @@ -0,0 +1,24 @@ +import * as coreHttp from "@azure/core-http"; +import { ManagedPrivateEndpoints } from "./operations"; +import { ManagedPrivateEndpointsClientContext } from "./managedPrivateEndpointsClientContext"; +import { ManagedPrivateEndpointsClientOptionalParams } from "./models"; + +export class ManagedPrivateEndpointsClient extends ManagedPrivateEndpointsClientContext { + /** + * Initializes a new instance of the ManagedPrivateEndpointsClient class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. 
+ * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: ManagedPrivateEndpointsClientOptionalParams + ) { + super(credentials, endpoint, options); + this.managedPrivateEndpoints = new ManagedPrivateEndpoints(this); + } + + managedPrivateEndpoints: ManagedPrivateEndpoints; +} diff --git a/sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClientContext.ts b/sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClientContext.ts new file mode 100644 index 000000000000..8b619e1e7781 --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClientContext.ts @@ -0,0 +1,52 @@ +import * as coreHttp from "@azure/core-http"; +import { ManagedPrivateEndpointsClientOptionalParams } from "./models"; + +const packageName = "@azure/synapse-managed-endpoints"; +const packageVersion = "1.0.0"; + +export class ManagedPrivateEndpointsClientContext extends coreHttp.ServiceClient { + endpoint: string; + apiVersion: string; + + /** + * Initializes a new instance of the ManagedPrivateEndpointsClientContext class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. 
+ * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: ManagedPrivateEndpointsClientOptionalParams + ) { + if (credentials === undefined) { + throw new Error("'credentials' cannot be null"); + } + if (endpoint === undefined) { + throw new Error("'endpoint' cannot be null"); + } + + // Initializing default values for options + if (!options) { + options = {}; + } + + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + + super(credentials, options); + + this.requestContentType = "application/json; charset=utf-8"; + + this.baseUri = options.endpoint || "{endpoint}"; + + // Parameter assignments + this.endpoint = endpoint; + + // Assigning values to Constant parameters + this.apiVersion = options.apiVersion || "2019-06-01-preview"; + } +} diff --git a/sdk/synapse/synapse-managed-endpoints/src/models/index.ts b/sdk/synapse/synapse-managed-endpoints/src/models/index.ts new file mode 100644 index 000000000000..c8a5ef476bf7 --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/src/models/index.ts @@ -0,0 +1,183 @@ +import * as coreHttp from "@azure/core-http"; + +/** + * Managed private endpoint + */ +export interface ManagedPrivateEndpoint { + /** + * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; + /** + * The name of the resource + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly name?: string; + /** + * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. 
+ * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * Managed private endpoint properties + */ + properties?: ManagedPrivateEndpointProperties; +} + +/** + * Properties of a managed private endpoint + */ +export interface ManagedPrivateEndpointProperties { + /** + * The ARM resource ID of the resource to which the managed private endpoint is created + */ + privateLinkResourceId?: string; + /** + * The groupId to which the managed private endpoint is created + */ + groupId?: string; + /** + * The managed private endpoint provisioning state + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly provisioningState?: string; + /** + * The managed private endpoint connection state + */ + connectionState?: ManagedPrivateEndpointConnectionState; + /** + * Denotes whether the managed private endpoint is reserved + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly isReserved?: boolean; +} + +/** + * The connection state of a managed private endpoint + */ +export interface ManagedPrivateEndpointConnectionState { + /** + * The approval status + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly status?: string; + /** + * The managed private endpoint description + */ + description?: string; + /** + * The actions required on the managed private endpoint + */ + actionsRequired?: string; +} + +/** + * A list of managed private endpoints + */ +export interface ManagedPrivateEndpointListResponse { + /** + * List of managed private endpoints + */ + value?: ManagedPrivateEndpoint[]; + /** + * The link to the next page of results, if any remaining results exist. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly nextLink?: string; +} + +/** + * Contains response data for the get operation. 
+ */ +export type ManagedPrivateEndpointsGetResponse = ManagedPrivateEndpoint & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: ManagedPrivateEndpoint; + }; +}; + +/** + * Contains response data for the create operation. + */ +export type ManagedPrivateEndpointsCreateResponse = ManagedPrivateEndpoint & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: ManagedPrivateEndpoint; + }; +}; + +/** + * Contains response data for the list operation. + */ +export type ManagedPrivateEndpointsListResponse = ManagedPrivateEndpointListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: ManagedPrivateEndpointListResponse; + }; +}; + +/** + * Contains response data for the listNext operation. + */ +export type ManagedPrivateEndpointsListNextResponse = ManagedPrivateEndpointListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: ManagedPrivateEndpointListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface ManagedPrivateEndpointsClientOptionalParams + extends coreHttp.ServiceClientOptions { + /** + * Api Version + */ + apiVersion?: string; + /** + * Overrides client endpoint. 
+ */ + endpoint?: string; +} diff --git a/sdk/synapse/synapse-managed-endpoints/src/models/mappers.ts b/sdk/synapse/synapse-managed-endpoints/src/models/mappers.ts new file mode 100644 index 000000000000..b4b7e559f43c --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/src/models/mappers.ts @@ -0,0 +1,136 @@ +import * as coreHttp from "@azure/core-http"; + +export const ManagedPrivateEndpoint: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedPrivateEndpoint", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + readOnly: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "ManagedPrivateEndpointProperties" + } + } + } + } +}; + +export const ManagedPrivateEndpointProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedPrivateEndpointProperties", + modelProperties: { + privateLinkResourceId: { + serializedName: "privateLinkResourceId", + type: { + name: "String" + } + }, + groupId: { + serializedName: "groupId", + type: { + name: "String" + } + }, + provisioningState: { + serializedName: "provisioningState", + readOnly: true, + type: { + name: "String" + } + }, + connectionState: { + serializedName: "connectionState", + type: { + name: "Composite", + className: "ManagedPrivateEndpointConnectionState" + } + }, + isReserved: { + serializedName: "isReserved", + readOnly: true, + type: { + name: "Boolean" + } + } + } + } +}; + +export const ManagedPrivateEndpointConnectionState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedPrivateEndpointConnectionState", + modelProperties: { + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + }, + description: { + serializedName: 
"description", + type: { + name: "String" + } + }, + actionsRequired: { + serializedName: "actionsRequired", + type: { + name: "String" + } + } + } + } +}; + +export const ManagedPrivateEndpointListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedPrivateEndpointListResponse", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ManagedPrivateEndpoint" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; diff --git a/sdk/synapse/synapse-managed-endpoints/src/models/parameters.ts b/sdk/synapse/synapse-managed-endpoints/src/models/parameters.ts new file mode 100644 index 000000000000..4ba8a4211224 --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/src/models/parameters.ts @@ -0,0 +1,93 @@ +import { + OperationParameter, + OperationURLParameter, + OperationQueryParameter +} from "@azure/core-http"; +import { ManagedPrivateEndpoint as ManagedPrivateEndpointMapper } from "../models/mappers"; + +export const accept: OperationParameter = { + parameterPath: "accept", + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; + +export const endpoint: OperationURLParameter = { + parameterPath: "endpoint", + mapper: { + serializedName: "endpoint", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const apiVersion: OperationQueryParameter = { + parameterPath: "apiVersion", + mapper: { + defaultValue: "2019-06-01-preview", + isConstant: true, + serializedName: "api-version", + type: { + name: "String" + } + } +}; + +export const managedVirtualNetworkName: OperationURLParameter = { + parameterPath: "managedVirtualNetworkName", + mapper: { + serializedName: "managedVirtualNetworkName", + required: true, + type: { + name: "String" + } + } +}; + +export 
const managedPrivateEndpointName: OperationURLParameter = { + parameterPath: "managedPrivateEndpointName", + mapper: { + serializedName: "managedPrivateEndpointName", + required: true, + type: { + name: "String" + } + } +}; + +export const contentType: OperationParameter = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; + +export const managedPrivateEndpoint: OperationParameter = { + parameterPath: "managedPrivateEndpoint", + mapper: ManagedPrivateEndpointMapper +}; + +export const nextLink: OperationURLParameter = { + parameterPath: "nextLink", + mapper: { + serializedName: "nextLink", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; diff --git a/sdk/synapse/synapse-managed-endpoints/src/operations/index.ts b/sdk/synapse/synapse-managed-endpoints/src/operations/index.ts new file mode 100644 index 000000000000..32ab6933be8c --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/src/operations/index.ts @@ -0,0 +1 @@ +export * from "./managedPrivateEndpoints"; diff --git a/sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts b/sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts new file mode 100644 index 000000000000..d354c10f3bf2 --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts @@ -0,0 +1,276 @@ +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ManagedPrivateEndpointsClient } from "../managedPrivateEndpointsClient"; +import { + ManagedPrivateEndpoint, + ManagedPrivateEndpointsGetResponse, + ManagedPrivateEndpointsCreateResponse, + ManagedPrivateEndpointsListResponse, + ManagedPrivateEndpointsListNextResponse +} from 
"../models"; + +/** + * Class representing a ManagedPrivateEndpoints. + */ +export class ManagedPrivateEndpoints { + private readonly client: ManagedPrivateEndpointsClient; + + /** + * Initialize a new instance of the class ManagedPrivateEndpoints class. + * @param client Reference to the service client + */ + constructor(client: ManagedPrivateEndpointsClient) { + this.client = client; + } + + /** + * List Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param options The options parameters. + */ + public list( + managedVirtualNetworkName: string, + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.listPagingAll(managedVirtualNetworkName, options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.listPagingPage(managedVirtualNetworkName, options); + } + }; + } + + private async *listPagingPage( + managedVirtualNetworkName: string, + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._list(managedVirtualNetworkName, options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._listNext( + managedVirtualNetworkName, + continuationToken, + options + ); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *listPagingAll( + managedVirtualNetworkName: string, + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.listPagingPage( + managedVirtualNetworkName, + options + )) { + yield* page; + } + } + + /** + * Get Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param managedPrivateEndpointName Managed private endpoint name + * @param options The options parameters. 
+ */ + get( + managedVirtualNetworkName: string, + managedPrivateEndpointName: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + managedPrivateEndpointName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; + } + + /** + * Create Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param managedPrivateEndpointName Managed private endpoint name + * @param managedPrivateEndpoint Managed private endpoint properties. + * @param options The options parameters. + */ + create( + managedVirtualNetworkName: string, + managedPrivateEndpointName: string, + managedPrivateEndpoint: ManagedPrivateEndpoint, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + managedPrivateEndpointName, + managedPrivateEndpoint, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + createOperationSpec + ) as Promise; + } + + /** + * Delete Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param managedPrivateEndpointName Managed private endpoint name + * @param options The options parameters. 
+ */ + delete( + managedVirtualNetworkName: string, + managedPrivateEndpointName: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + managedPrivateEndpointName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + deleteOperationSpec + ) as Promise; + } + + /** + * List Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param options The options parameters. + */ + private _list( + managedVirtualNetworkName: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; + } + + /** + * ListNext + * @param managedVirtualNetworkName Managed virtual network name + * @param nextLink The nextLink from the previous successful call to the List method. + * @param options The options parameters. 
+ */ + private _listNext( + managedVirtualNetworkName: string, + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + nextLink, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + listNextOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getOperationSpec: coreHttp.OperationSpec = { + path: + "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ManagedPrivateEndpoint + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.managedVirtualNetworkName, + Parameters.managedPrivateEndpointName + ], + headerParameters: [Parameters.accept], + serializer +}; +const createOperationSpec: coreHttp.OperationSpec = { + path: + "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.ManagedPrivateEndpoint + } + }, + requestBody: Parameters.managedPrivateEndpoint, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.managedVirtualNetworkName, + Parameters.managedPrivateEndpointName + ], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const deleteOperationSpec: coreHttp.OperationSpec = { + path: + "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}", + httpMethod: "DELETE", + responses: { 202: {}, 204: {} }, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.managedVirtualNetworkName, + 
Parameters.managedPrivateEndpointName + ], + serializer +}; +const listOperationSpec: coreHttp.OperationSpec = { + path: + "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ManagedPrivateEndpointListResponse + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.managedVirtualNetworkName], + headerParameters: [Parameters.accept], + serializer +}; +const listNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ManagedPrivateEndpointListResponse + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.managedVirtualNetworkName, + Parameters.nextLink + ], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-managed-endpoints/tsconfig.json b/sdk/synapse/synapse-managed-endpoints/tsconfig.json new file mode 100644 index 000000000000..0290d6707a44 --- /dev/null +++ b/sdk/synapse/synapse-managed-endpoints/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "es6", + "moduleResolution": "node", + "strict": true, + "target": "es5", + "sourceMap": true, + "declarationMap": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "preserveConstEnums": true, + "lib": ["es6", "dom"], + "declaration": true, + "outDir": "./esm", + "importHelpers": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/sdk/synapse/synapse-monitoring/README.md b/sdk/synapse/synapse-monitoring/README.md new file mode 100644 index 000000000000..28921720d464 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/README.md @@ -0,0 +1,27 @@ +## Azure MonitoringClient SDK for JavaScript + +This package contains an isomorphic SDK for MonitoringClient. 
+ +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +### How to Install + +```bash +npm install @azure/synapse-monitoring +``` + +### How to use + +#### Sample code + +Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. + +## Related projects + +- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) + + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file diff --git a/sdk/synapse/synapse-monitoring/api-extractor.json b/sdk/synapse/synapse-monitoring/api-extractor.json new file mode 100644 index 000000000000..5fb5b21b56f2 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/api-extractor.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "mainEntryPointFilePath": "./esm/index.d.ts", + "docModel": { "enabled": true }, + "apiReport": { "enabled": true, "reportFolder": "./review" }, + "dtsRollup": { + "enabled": true, + "untrimmedFilePath": "", + "publicTrimmedFilePath": "./esm/index.d.ts" + }, + "messages": { + "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "extractorMessageReporting": { + "ae-missing-release-tag": { "logLevel": "none" }, + "ae-unresolved-link": { "logLevel": "none" } + } + } +} diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json new file mode 100644 index 000000000000..46e520b0d243 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/package.json @@ -0,0 +1,48 @@ +{ + "name": "@azure/synapse-monitoring", + "author": "Microsoft Corporation", + "description": "A generated SDK for MonitoringClient.", + "version": "1.0.0", + "dependencies": { "@azure/core-http": "^1.2.0", "tslib": "^2.0.0" }, + "keywords": ["node", "azure", "typescript", "browser", 
"isomorphic"], + "license": "MIT", + "main": "./dist/synapse-monitoring.js", + "module": "./esm/index.js", + "types": "./esm/index.d.ts", + "devDependencies": { + "typescript": "~3.9.3", + "rollup": "^1.16.3", + "rollup-plugin-node-resolve": "^3.4.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "uglify-js": "^3.4.9", + "@microsoft/api-extractor": "7.7.11" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js", + "repository": { + "type": "git", + "url": "https://github.com/Azure/azure-sdk-for-js.git" + }, + "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "esm/**/*.js", + "esm/**/*.js.map", + "esm/**/*.d.ts", + "esm/**/*.d.ts.map", + "src/**/*.ts", + "README.md", + "rollup.config.js", + "tsconfig.json" + ], + "scripts": { + "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-monitoring.js.map'\" -o ./dist/synapse-monitoring.min.js ./dist/synapse-monitoring.js", + "prepack": "npm install && npm run build", + "extract-api": "api-extractor run --local" + }, + "sideEffects": false, + "autoPublish": true +} diff --git a/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md b/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md new file mode 100644 index 000000000000..239afacafa82 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md @@ -0,0 +1,116 @@ +## API Report File for "@azure/synapse-monitoring" + +> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). 
+ +```ts + +import * as coreHttp from '@azure/core-http'; + +// @public (undocumented) +export class MonitoringClient extends MonitoringClientContext { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: MonitoringClientOptionalParams); + // Warning: (ae-forgotten-export) The symbol "Monitoring" needs to be exported by the entry point index.d.ts + // + // (undocumented) + monitoring: Monitoring; +} + +// @public (undocumented) +export class MonitoringClientContext extends coreHttp.ServiceClient { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: MonitoringClientOptionalParams); + // (undocumented) + apiVersion: string; + // (undocumented) + endpoint: string; +} + +// @public +export interface MonitoringClientOptionalParams extends coreHttp.ServiceClientOptions { + apiVersion?: string; + endpoint?: string; +} + +// @public +export interface MonitoringGetSparkJobListOptionalParams extends coreHttp.OperationOptions { + xMsClientRequestId?: string; +} + +// @public +export type MonitoringGetSparkJobListResponse = SparkJobListViewResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkJobListViewResponse; + }; +}; + +// @public +export interface MonitoringGetSqlJobQueryStringOptionalParams extends coreHttp.OperationOptions { + // (undocumented) + filter?: string; + // (undocumented) + orderby?: string; + // (undocumented) + skip?: string; + xMsClientRequestId?: string; +} + +// @public +export type MonitoringGetSqlJobQueryStringResponse = SqlQueryStringDataModel & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SqlQueryStringDataModel; + }; +}; + +// @public (undocumented) +export interface SparkJob { + // (undocumented) + compute?: string; + // (undocumented) + endTime?: Date; + // (undocumented) + jobType?: string; + // (undocumented) + livyId?: string; + // (undocumented) 
+ name?: string; + // (undocumented) + pipeline?: SparkJob[] | null; + // (undocumented) + queuedDuration?: string; + // (undocumented) + runningDuration?: string; + // (undocumented) + sparkApplicationId?: string; + // (undocumented) + sparkJobDefinition?: string | null; + // (undocumented) + state?: string; + // (undocumented) + submitter?: string; + // (undocumented) + submitTime?: Date; + // (undocumented) + timing?: string[]; + // (undocumented) + totalDuration?: string; +} + +// @public (undocumented) +export interface SparkJobListViewResponse { + // (undocumented) + nJobs?: number; + // (undocumented) + sparkJobs?: SparkJob[]; +} + +// @public (undocumented) +export interface SqlQueryStringDataModel { + // (undocumented) + query?: string; +} + + +// (No @packageDocumentation comment for this package) + +``` diff --git a/sdk/synapse/synapse-monitoring/rollup.config.js b/sdk/synapse/synapse-monitoring/rollup.config.js new file mode 100644 index 000000000000..cc118fad214c --- /dev/null +++ b/sdk/synapse/synapse-monitoring/rollup.config.js @@ -0,0 +1,31 @@ +import rollup from "rollup"; +import nodeResolve from "rollup-plugin-node-resolve"; +import sourcemaps from "rollup-plugin-sourcemaps"; + +/** + * @type {rollup.RollupFileOptions} + */ +const config = { + input: "./esm/monitoringClient.js", + external: ["@azure/core-http", "@azure/core-arm"], + output: { + file: "./dist/synapse-monitoring.js", + format: "umd", + name: "Azure.SynapseMonitoring", + sourcemap: true, + globals: { + "@azure/core-http": "coreHttp", + "@azure/core-arm": "coreArm" + }, + banner: `/* + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ ` + }, + plugins: [nodeResolve({ module: true }), sourcemaps()] +}; + +export default config; diff --git a/sdk/synapse/synapse-monitoring/src/index.ts b/sdk/synapse/synapse-monitoring/src/index.ts new file mode 100644 index 000000000000..b9b74b67ef61 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/index.ts @@ -0,0 +1,3 @@ +export * from "./models"; +export { MonitoringClient } from "./monitoringClient"; +export { MonitoringClientContext } from "./monitoringClientContext"; diff --git a/sdk/synapse/synapse-monitoring/src/models/index.ts b/sdk/synapse/synapse-monitoring/src/models/index.ts new file mode 100644 index 000000000000..4d28685ede7a --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/models/index.ts @@ -0,0 +1,108 @@ +import * as coreHttp from "@azure/core-http"; + +export interface SparkJobListViewResponse { + nJobs?: number; + sparkJobs?: SparkJob[]; +} + +export interface SparkJob { + state?: string; + name?: string; + submitter?: string; + compute?: string; + sparkApplicationId?: string; + livyId?: string; + timing?: string[]; + sparkJobDefinition?: string | null; + pipeline?: SparkJob[] | null; + jobType?: string; + submitTime?: Date; + endTime?: Date; + queuedDuration?: string; + runningDuration?: string; + totalDuration?: string; +} + +export interface SqlQueryStringDataModel { + query?: string; +} + +/** + * Optional parameters. + */ +export interface MonitoringGetSparkJobListOptionalParams + extends coreHttp.OperationOptions { + /** + * Can provide a guid, which is helpful for debugging and to provide better customer support + */ + xMsClientRequestId?: string; +} + +/** + * Contains response data for the getSparkJobList operation. + */ +export type MonitoringGetSparkJobListResponse = SparkJobListViewResponse & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkJobListViewResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface MonitoringGetSqlJobQueryStringOptionalParams + extends coreHttp.OperationOptions { + /** + * Can provide a guid, which is helpful for debugging and to provide better customer support + */ + xMsClientRequestId?: string; + filter?: string; + orderby?: string; + skip?: string; +} + +/** + * Contains response data for the getSqlJobQueryString operation. + */ +export type MonitoringGetSqlJobQueryStringResponse = SqlQueryStringDataModel & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlQueryStringDataModel; + }; +}; + +/** + * Optional parameters. + */ +export interface MonitoringClientOptionalParams + extends coreHttp.ServiceClientOptions { + /** + * Api Version + */ + apiVersion?: string; + /** + * Overrides client endpoint. 
+ */ + endpoint?: string; +} diff --git a/sdk/synapse/synapse-monitoring/src/models/mappers.ts b/sdk/synapse/synapse-monitoring/src/models/mappers.ts new file mode 100644 index 000000000000..ec985d064a9c --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/models/mappers.ts @@ -0,0 +1,155 @@ +import * as coreHttp from "@azure/core-http"; + +export const SparkJobListViewResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobListViewResponse", + modelProperties: { + nJobs: { + serializedName: "nJobs", + type: { + name: "Number" + } + }, + sparkJobs: { + serializedName: "sparkJobs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkJob" + } + } + } + } + } + } +}; + +export const SparkJob: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJob", + modelProperties: { + state: { + serializedName: "state", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + submitter: { + serializedName: "submitter", + type: { + name: "String" + } + }, + compute: { + serializedName: "compute", + type: { + name: "String" + } + }, + sparkApplicationId: { + serializedName: "sparkApplicationId", + type: { + name: "String" + } + }, + livyId: { + serializedName: "livyId", + type: { + name: "String" + } + }, + timing: { + serializedName: "timing", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + sparkJobDefinition: { + serializedName: "sparkJobDefinition", + nullable: true, + type: { + name: "String" + } + }, + pipeline: { + serializedName: "pipeline", + nullable: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkJob" + } + } + } + }, + jobType: { + serializedName: "jobType", + type: { + name: "String" + } + }, + submitTime: { + serializedName: "submitTime", + type: { + name: "DateTime" + } + }, + endTime: { + serializedName: "endTime", + 
type: { + name: "DateTime" + } + }, + queuedDuration: { + serializedName: "queuedDuration", + type: { + name: "String" + } + }, + runningDuration: { + serializedName: "runningDuration", + type: { + name: "String" + } + }, + totalDuration: { + serializedName: "totalDuration", + type: { + name: "String" + } + } + } + } +}; + +export const SqlQueryStringDataModel: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlQueryStringDataModel", + modelProperties: { + query: { + serializedName: "query", + type: { + name: "String" + } + } + } + } +}; diff --git a/sdk/synapse/synapse-monitoring/src/models/parameters.ts b/sdk/synapse/synapse-monitoring/src/models/parameters.ts new file mode 100644 index 000000000000..6f239c88c26f --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/models/parameters.ts @@ -0,0 +1,81 @@ +import { + OperationParameter, + OperationURLParameter, + OperationQueryParameter +} from "@azure/core-http"; + +export const accept: OperationParameter = { + parameterPath: "accept", + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; + +export const endpoint: OperationURLParameter = { + parameterPath: "endpoint", + mapper: { + serializedName: "endpoint", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const xMsClientRequestId: OperationParameter = { + parameterPath: ["options", "xMsClientRequestId"], + mapper: { + serializedName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; + +export const apiVersion: OperationQueryParameter = { + parameterPath: "apiVersion", + mapper: { + defaultValue: "2019-11-01-preview", + isConstant: true, + serializedName: "api-version", + type: { + name: "String" + } + } +}; + +export const filter: OperationQueryParameter = { + parameterPath: ["options", "filter"], + mapper: { + serializedName: "filter", + type: { + name: "String" + } + } +}; + +export const orderby: 
OperationQueryParameter = { + parameterPath: ["options", "orderby"], + mapper: { + serializedName: "$orderby", + type: { + name: "String" + } + } +}; + +export const skip: OperationQueryParameter = { + parameterPath: ["options", "skip"], + mapper: { + serializedName: "skip", + type: { + name: "String" + } + } +}; diff --git a/sdk/synapse/synapse-monitoring/src/monitoringClient.ts b/sdk/synapse/synapse-monitoring/src/monitoringClient.ts new file mode 100644 index 000000000000..5baddaef9293 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/monitoringClient.ts @@ -0,0 +1,24 @@ +import * as coreHttp from "@azure/core-http"; +import { Monitoring } from "./operations"; +import { MonitoringClientContext } from "./monitoringClientContext"; +import { MonitoringClientOptionalParams } from "./models"; + +export class MonitoringClient extends MonitoringClientContext { + /** + * Initializes a new instance of the MonitoringClient class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. 
+ * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: MonitoringClientOptionalParams + ) { + super(credentials, endpoint, options); + this.monitoring = new Monitoring(this); + } + + monitoring: Monitoring; +} diff --git a/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts b/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts new file mode 100644 index 000000000000..03831c9ea134 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts @@ -0,0 +1,52 @@ +import * as coreHttp from "@azure/core-http"; +import { MonitoringClientOptionalParams } from "./models"; + +const packageName = "@azure/synapse-monitoring"; +const packageVersion = "1.0.0"; + +export class MonitoringClientContext extends coreHttp.ServiceClient { + endpoint: string; + apiVersion: string; + + /** + * Initializes a new instance of the MonitoringClientContext class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. 
+ * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: MonitoringClientOptionalParams + ) { + if (credentials === undefined) { + throw new Error("'credentials' cannot be null"); + } + if (endpoint === undefined) { + throw new Error("'endpoint' cannot be null"); + } + + // Initializing default values for options + if (!options) { + options = {}; + } + + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + + super(credentials, options); + + this.requestContentType = "application/json; charset=utf-8"; + + this.baseUri = options.endpoint || "{endpoint}"; + + // Parameter assignments + this.endpoint = endpoint; + + // Assigning values to Constant parameters + this.apiVersion = options.apiVersion || "2019-11-01-preview"; + } +} diff --git a/sdk/synapse/synapse-monitoring/src/operations/index.ts b/sdk/synapse/synapse-monitoring/src/operations/index.ts new file mode 100644 index 000000000000..c3443639fc87 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/operations/index.ts @@ -0,0 +1 @@ +export * from "./monitoring"; diff --git a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts new file mode 100644 index 000000000000..53f2223463d5 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts @@ -0,0 +1,92 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { MonitoringClient } from "../monitoringClient"; +import { + MonitoringGetSparkJobListOptionalParams, + MonitoringGetSparkJobListResponse, + MonitoringGetSqlJobQueryStringOptionalParams, + MonitoringGetSqlJobQueryStringResponse +} from "../models"; + +/** + * Class representing a 
Monitoring. + */ +export class Monitoring { + private readonly client: MonitoringClient; + + /** + * Initialize a new instance of the class Monitoring class. + * @param client Reference to the service client + */ + constructor(client: MonitoringClient) { + this.client = client; + } + + /** + * Get list of spark applications for the workspace. + * @param options The options parameters. + */ + getSparkJobList( + options?: MonitoringGetSparkJobListOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkJobListOperationSpec + ) as Promise; + } + + /** + * Get SQL OD/DW Query for the workspace. + * @param options The options parameters. + */ + getSqlJobQueryString( + options?: MonitoringGetSqlJobQueryStringOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSqlJobQueryStringOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getSparkJobListOperationSpec: coreHttp.OperationSpec = { + path: "/monitoring/workloadTypes/spark/Applications", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkJobListViewResponse + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.xMsClientRequestId], + serializer +}; +const getSqlJobQueryStringOperationSpec: coreHttp.OperationSpec = { + path: "/monitoring/workloadTypes/sql/querystring", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlQueryStringDataModel + } + }, + queryParameters: [ + Parameters.apiVersion, + Parameters.filter, + Parameters.orderby, 
+ Parameters.skip + ], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.xMsClientRequestId], + serializer +}; diff --git a/sdk/synapse/synapse-monitoring/tsconfig.json b/sdk/synapse/synapse-monitoring/tsconfig.json new file mode 100644 index 000000000000..0290d6707a44 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "es6", + "moduleResolution": "node", + "strict": true, + "target": "es5", + "sourceMap": true, + "declarationMap": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "preserveConstEnums": true, + "lib": ["es6", "dom"], + "declaration": true, + "outDir": "./esm", + "importHelpers": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/sdk/synapse/synapse-spark/README.md b/sdk/synapse/synapse-spark/README.md new file mode 100644 index 000000000000..144dbf767bd7 --- /dev/null +++ b/sdk/synapse/synapse-spark/README.md @@ -0,0 +1,27 @@ +## Azure SparkClient SDK for JavaScript + +This package contains an isomorphic SDK for SparkClient. + +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +### How to Install + +```bash +npm install @azure/synapse-spark +``` + +### How to use + +#### Sample code + +Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. 
+ +## Related projects + +- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) + + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file diff --git a/sdk/synapse/synapse-spark/api-extractor.json b/sdk/synapse/synapse-spark/api-extractor.json new file mode 100644 index 000000000000..5fb5b21b56f2 --- /dev/null +++ b/sdk/synapse/synapse-spark/api-extractor.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "mainEntryPointFilePath": "./esm/index.d.ts", + "docModel": { "enabled": true }, + "apiReport": { "enabled": true, "reportFolder": "./review" }, + "dtsRollup": { + "enabled": true, + "untrimmedFilePath": "", + "publicTrimmedFilePath": "./esm/index.d.ts" + }, + "messages": { + "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "extractorMessageReporting": { + "ae-missing-release-tag": { "logLevel": "none" }, + "ae-unresolved-link": { "logLevel": "none" } + } + } +} diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json new file mode 100644 index 000000000000..67d5c1cc9ac7 --- /dev/null +++ b/sdk/synapse/synapse-spark/package.json @@ -0,0 +1,48 @@ +{ + "name": "@azure/synapse-spark", + "author": "Microsoft Corporation", + "description": "A generated SDK for SparkClient.", + "version": "1.0.0", + "dependencies": { "@azure/core-http": "^1.2.0", "tslib": "^2.0.0" }, + "keywords": ["node", "azure", "typescript", "browser", "isomorphic"], + "license": "MIT", + "main": "./dist/synapse-spark.js", + "module": "./esm/index.js", + "types": "./esm/index.d.ts", + "devDependencies": { + "typescript": "~3.9.3", + "rollup": "^1.16.3", + "rollup-plugin-node-resolve": "^3.4.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "uglify-js": "^3.4.9", + "@microsoft/api-extractor": "7.7.11" + }, + "homepage": 
"https://github.com/Azure/azure-sdk-for-js", + "repository": { + "type": "git", + "url": "https://github.com/Azure/azure-sdk-for-js.git" + }, + "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "esm/**/*.js", + "esm/**/*.js.map", + "esm/**/*.d.ts", + "esm/**/*.d.ts.map", + "src/**/*.ts", + "README.md", + "rollup.config.js", + "tsconfig.json" + ], + "scripts": { + "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-spark.js.map'\" -o ./dist/synapse-spark.min.js ./dist/synapse-spark.js", + "prepack": "npm install && npm run build", + "extract-api": "api-extractor run --local" + }, + "sideEffects": false, + "autoPublish": true +} diff --git a/sdk/synapse/synapse-spark/review/synapse-spark.api.md b/sdk/synapse/synapse-spark/review/synapse-spark.api.md new file mode 100644 index 000000000000..e1689d19b61d --- /dev/null +++ b/sdk/synapse/synapse-spark/review/synapse-spark.api.md @@ -0,0 +1,582 @@ +## API Report File for "@azure/synapse-spark" + +> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). 
+ +```ts + +import * as coreHttp from '@azure/core-http'; + +// @public +export const enum KnownPluginCurrentState { + // (undocumented) + Cleanup = "Cleanup", + // (undocumented) + Ended = "Ended", + // (undocumented) + Monitoring = "Monitoring", + // (undocumented) + Preparation = "Preparation", + // (undocumented) + Queued = "Queued", + // (undocumented) + ResourceAcquisition = "ResourceAcquisition", + // (undocumented) + Submission = "Submission" +} + +// @public +export const enum KnownSchedulerCurrentState { + // (undocumented) + Ended = "Ended", + // (undocumented) + Queued = "Queued", + // (undocumented) + Scheduled = "Scheduled" +} + +// @public +export const enum KnownSparkBatchJobResultType { + // (undocumented) + Cancelled = "Cancelled", + // (undocumented) + Failed = "Failed", + // (undocumented) + Succeeded = "Succeeded", + // (undocumented) + Uncertain = "Uncertain" +} + +// @public +export const enum KnownSparkErrorSource { + // (undocumented) + Dependency = "Dependency", + // (undocumented) + System = "System", + // (undocumented) + Unknown = "Unknown", + // (undocumented) + User = "User" +} + +// @public +export const enum KnownSparkJobType { + // (undocumented) + SparkBatch = "SparkBatch", + // (undocumented) + SparkSession = "SparkSession" +} + +// @public +export const enum KnownSparkSessionResultType { + // (undocumented) + Cancelled = "Cancelled", + // (undocumented) + Failed = "Failed", + // (undocumented) + Succeeded = "Succeeded", + // (undocumented) + Uncertain = "Uncertain" +} + +// @public +export const enum KnownSparkStatementLanguageType { + // (undocumented) + Dotnetspark = "dotnetspark", + // (undocumented) + Pyspark = "pyspark", + // (undocumented) + Spark = "spark", + // (undocumented) + Sql = "sql" +} + +// @public +export type PluginCurrentState = string; + +// @public +export type SchedulerCurrentState = string; + +// @public +export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { + 
detailed?: boolean; +} + +// @public +export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkBatchJob; + }; +}; + +// @public +export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; +} + +// @public +export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkBatchJob; + }; +}; + +// @public +export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; + fromParam?: number; + size?: number; +} + +// @public +export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkBatchJobCollection; + }; +}; + +// @public (undocumented) +export interface SparkBatchJob { + appId?: string | null; + appInfo?: { + [propertyName: string]: string; + } | null; + artifactId?: string; + errors?: SparkServiceError[]; + id: number; + jobType?: SparkJobType; + // (undocumented) + livyInfo?: SparkBatchJobState; + logLines?: string[] | null; + name?: string; + plugin?: SparkServicePlugin; + result?: SparkBatchJobResultType; + scheduler?: SparkScheduler; + sparkPoolName?: string; + state?: string; + submitterId?: string; + submitterName?: string; + tags?: { + [propertyName: string]: string; + }; + workspaceName?: string; +} + +// @public +export interface SparkBatchJobCollection { + from: number; + sessions?: SparkBatchJob[]; + total: number; +} + +// @public (undocumented) +export interface SparkBatchJobOptions { + // (undocumented) + archives?: string[]; + // (undocumented) + arguments?: string[]; + // (undocumented) + artifactId?: string; + // (undocumented) + className?: string; + configuration?: { + [propertyName: string]: string; + }; + // (undocumented) + driverCores?: number; + // (undocumented) 
+ driverMemory?: string; + // (undocumented) + executorCores?: number; + // (undocumented) + executorCount?: number; + // (undocumented) + executorMemory?: string; + // (undocumented) + file: string; + // (undocumented) + files?: string[]; + // (undocumented) + jars?: string[]; + // (undocumented) + name: string; + // (undocumented) + pythonFiles?: string[]; + tags?: { + [propertyName: string]: string; + }; +} + +// @public +export type SparkBatchJobResultType = string; + +// @public (undocumented) +export interface SparkBatchJobState { + currentState?: string; + deadAt?: Date | null; + // (undocumented) + jobCreationRequest?: SparkRequest; + notStartedAt?: Date | null; + recoveringAt?: Date | null; + runningAt?: Date | null; + startingAt?: Date | null; + successAt?: Date | null; + terminatedAt?: Date | null; +} + +// @public (undocumented) +export class SparkClient extends SparkClientContext { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams); + // Warning: (ae-forgotten-export) The symbol "SparkBatch" needs to be exported by the entry point index.d.ts + // + // (undocumented) + sparkBatch: SparkBatch; + // Warning: (ae-forgotten-export) The symbol "SparkSession" needs to be exported by the entry point index.d.ts + // + // (undocumented) + sparkSession: SparkSession_2; +} + +// @public (undocumented) +export class SparkClientContext extends coreHttp.ServiceClient { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams); + // (undocumented) + endpoint: string; + // (undocumented) + livyApiVersion: string; + // (undocumented) + sparkPoolName: string; +} + +// @public +export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { + endpoint?: string; + livyApiVersion?: string; +} + +// @public +export type 
SparkErrorSource = string; + +// @public +export type SparkJobType = string; + +// @public (undocumented) +export interface SparkRequest { + // (undocumented) + archives?: string[]; + // (undocumented) + arguments?: string[]; + // (undocumented) + className?: string; + configuration?: { + [propertyName: string]: string; + }; + // (undocumented) + driverCores?: number; + // (undocumented) + driverMemory?: string; + // (undocumented) + executorCores?: number; + // (undocumented) + executorCount?: number; + // (undocumented) + executorMemory?: string; + // (undocumented) + file?: string; + // (undocumented) + files?: string[]; + // (undocumented) + jars?: string[]; + // (undocumented) + name?: string; + // (undocumented) + pythonFiles?: string[]; +} + +// @public (undocumented) +export interface SparkScheduler { + // (undocumented) + cancellationRequestedAt?: Date; + // (undocumented) + currentState?: SchedulerCurrentState; + // (undocumented) + endedAt?: Date | null; + // (undocumented) + scheduledAt?: Date | null; + // (undocumented) + submittedAt?: Date | null; +} + +// @public (undocumented) +export interface SparkServiceError { + // (undocumented) + errorCode?: string; + // (undocumented) + message?: string; + // (undocumented) + source?: SparkErrorSource; +} + +// @public (undocumented) +export interface SparkServicePlugin { + // (undocumented) + cleanupStartedAt?: Date | null; + // (undocumented) + currentState?: PluginCurrentState; + // (undocumented) + monitoringStartedAt?: Date | null; + // (undocumented) + preparationStartedAt?: Date | null; + // (undocumented) + resourceAcquisitionStartedAt?: Date | null; + // (undocumented) + submissionStartedAt?: Date | null; +} + +// @public (undocumented) +export interface SparkSession { + // (undocumented) + appId?: string | null; + appInfo?: { + [propertyName: string]: string; + } | null; + // (undocumented) + artifactId?: string; + errors?: SparkServiceError[]; + // (undocumented) + id: number; + jobType?: 
SparkJobType; + // (undocumented) + livyInfo?: SparkSessionState; + // (undocumented) + logLines?: string[] | null; + // (undocumented) + name?: string; + // (undocumented) + plugin?: SparkServicePlugin; + // (undocumented) + result?: SparkSessionResultType; + // (undocumented) + scheduler?: SparkScheduler; + // (undocumented) + sparkPoolName?: string; + // (undocumented) + state?: string; + // (undocumented) + submitterId?: string; + // (undocumented) + submitterName?: string; + tags?: { + [propertyName: string]: string; + }; + // (undocumented) + workspaceName?: string; +} + +// @public +export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellationResult & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkStatementCancellationResult; + }; +}; + +// @public (undocumented) +export interface SparkSessionCollection { + // (undocumented) + from: number; + // (undocumented) + sessions?: SparkSession[]; + // (undocumented) + total: number; +} + +// @public +export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; +} + +// @public +export type SparkSessionCreateSparkSessionResponse = SparkSession & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkSession; + }; +}; + +// @public +export type SparkSessionCreateSparkStatementResponse = SparkStatement & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkStatement; + }; +}; + +// @public +export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; +} + +// @public +export type SparkSessionGetSparkSessionResponse = SparkSession & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkSession; + }; +}; + +// @public +export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; + fromParam?: number; + size?: 
number; +} + +// @public +export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkSessionCollection; + }; +}; + +// @public +export type SparkSessionGetSparkStatementResponse = SparkStatement & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkStatement; + }; +}; + +// @public +export type SparkSessionGetSparkStatementsResponse = SparkStatementCollection & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkStatementCollection; + }; +}; + +// @public (undocumented) +export interface SparkSessionOptions { + // (undocumented) + archives?: string[]; + // (undocumented) + arguments?: string[]; + // (undocumented) + artifactId?: string; + // (undocumented) + className?: string; + configuration?: { + [propertyName: string]: string; + }; + // (undocumented) + driverCores?: number; + // (undocumented) + driverMemory?: string; + // (undocumented) + executorCores?: number; + // (undocumented) + executorCount?: number; + // (undocumented) + executorMemory?: string; + // (undocumented) + file?: string; + // (undocumented) + files?: string[]; + // (undocumented) + jars?: string[]; + // (undocumented) + name: string; + // (undocumented) + pythonFiles?: string[]; + tags?: { + [propertyName: string]: string; + }; +} + +// @public +export type SparkSessionResultType = string; + +// @public (undocumented) +export interface SparkSessionState { + // (undocumented) + busyAt?: Date | null; + // (undocumented) + currentState?: string; + // (undocumented) + deadAt?: Date | null; + // (undocumented) + errorAt?: Date | null; + // (undocumented) + idleAt?: Date | null; + // (undocumented) + jobCreationRequest?: SparkRequest; + // (undocumented) + notStartedAt?: Date | null; + // (undocumented) + recoveringAt?: Date | null; + // (undocumented) + shuttingDownAt?: Date | null; + // (undocumented) + startingAt?: Date | null; + 
terminatedAt?: Date | null; +} + +// @public (undocumented) +export interface SparkStatement { + // (undocumented) + code?: string; + // (undocumented) + id: number; + // (undocumented) + output?: SparkStatementOutput; + // (undocumented) + state?: string; +} + +// @public (undocumented) +export interface SparkStatementCancellationResult { + // (undocumented) + msg?: string; +} + +// @public (undocumented) +export interface SparkStatementCollection { + // (undocumented) + statements?: SparkStatement[]; + // (undocumented) + total: number; +} + +// @public +export type SparkStatementLanguageType = string; + +// @public (undocumented) +export interface SparkStatementOptions { + // (undocumented) + code?: string; + // (undocumented) + kind?: SparkStatementLanguageType; +} + +// @public (undocumented) +export interface SparkStatementOutput { + data?: any; + // (undocumented) + errorName?: string; + // (undocumented) + errorValue?: string; + // (undocumented) + executionCount: number; + // (undocumented) + status?: string; + // (undocumented) + traceback?: string[]; +} + + +// (No @packageDocumentation comment for this package) + +``` diff --git a/sdk/synapse/synapse-spark/rollup.config.js b/sdk/synapse/synapse-spark/rollup.config.js new file mode 100644 index 000000000000..ed9214dab22b --- /dev/null +++ b/sdk/synapse/synapse-spark/rollup.config.js @@ -0,0 +1,31 @@ +import rollup from "rollup"; +import nodeResolve from "rollup-plugin-node-resolve"; +import sourcemaps from "rollup-plugin-sourcemaps"; + +/** + * @type {rollup.RollupFileOptions} + */ +const config = { + input: "./esm/sparkClient.js", + external: ["@azure/core-http", "@azure/core-arm"], + output: { + file: "./dist/synapse-spark.js", + format: "umd", + name: "Azure.SynapseSpark", + sourcemap: true, + globals: { + "@azure/core-http": "coreHttp", + "@azure/core-arm": "coreArm" + }, + banner: `/* + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. 
See License.txt in the project root for license information. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ ` + }, + plugins: [nodeResolve({ module: true }), sourcemaps()] +}; + +export default config; diff --git a/sdk/synapse/synapse-spark/src/index.ts b/sdk/synapse/synapse-spark/src/index.ts new file mode 100644 index 000000000000..03e9c45aacdc --- /dev/null +++ b/sdk/synapse/synapse-spark/src/index.ts @@ -0,0 +1,3 @@ +export * from "./models"; +export { SparkClient } from "./sparkClient"; +export { SparkClientContext } from "./sparkClientContext"; diff --git a/sdk/synapse/synapse-spark/src/models/index.ts b/sdk/synapse/synapse-spark/src/models/index.ts new file mode 100644 index 000000000000..c388e24dc015 --- /dev/null +++ b/sdk/synapse/synapse-spark/src/models/index.ts @@ -0,0 +1,762 @@ +import * as coreHttp from "@azure/core-http"; + +/** + * Response for batch list operation. + */ +export interface SparkBatchJobCollection { + /** + * The start index of fetched sessions. + */ + from: number; + /** + * Number of sessions fetched. + */ + total: number; + /** + * Batch list + */ + sessions?: SparkBatchJob[]; +} + +export interface SparkBatchJob { + livyInfo?: SparkBatchJobState; + /** + * The batch name. + */ + name?: string; + /** + * The workspace name. + */ + workspaceName?: string; + /** + * The Spark pool name. + */ + sparkPoolName?: string; + /** + * The submitter name. + */ + submitterName?: string; + /** + * The submitter identifier. + */ + submitterId?: string; + /** + * The artifact identifier. + */ + artifactId?: string; + /** + * The job type. + */ + jobType?: SparkJobType; + /** + * The Spark batch job result. + */ + result?: SparkBatchJobResultType; + /** + * The scheduler information. + */ + scheduler?: SparkScheduler; + /** + * The plugin information. + */ + plugin?: SparkServicePlugin; + /** + * The error information. 
+ */ + errors?: SparkServiceError[]; + /** + * The tags. + */ + tags?: { [propertyName: string]: string }; + /** + * The session Id. + */ + id: number; + /** + * The application id of this session + */ + appId?: string | null; + /** + * The detailed application info. + */ + appInfo?: { [propertyName: string]: string } | null; + /** + * The batch state + */ + state?: string; + /** + * The log lines. + */ + logLines?: string[] | null; +} + +export interface SparkBatchJobState { + /** + * the time that at which "not_started" livy state was first seen. + */ + notStartedAt?: Date | null; + /** + * the time that at which "starting" livy state was first seen. + */ + startingAt?: Date | null; + /** + * the time that at which "running" livy state was first seen. + */ + runningAt?: Date | null; + /** + * time that at which "dead" livy state was first seen. + */ + deadAt?: Date | null; + /** + * the time that at which "success" livy state was first seen. + */ + successAt?: Date | null; + /** + * the time that at which "killed" livy state was first seen. + */ + terminatedAt?: Date | null; + /** + * the time that at which "recovering" livy state was first seen. + */ + recoveringAt?: Date | null; + /** + * the Spark job state. 
+ */ + currentState?: string; + jobCreationRequest?: SparkRequest; +} + +export interface SparkRequest { + name?: string; + file?: string; + className?: string; + arguments?: string[]; + jars?: string[]; + pythonFiles?: string[]; + files?: string[]; + archives?: string[]; + /** + * Dictionary of + */ + configuration?: { [propertyName: string]: string }; + driverMemory?: string; + driverCores?: number; + executorMemory?: string; + executorCores?: number; + executorCount?: number; +} + +export interface SparkScheduler { + submittedAt?: Date | null; + scheduledAt?: Date | null; + endedAt?: Date | null; + cancellationRequestedAt?: Date; + currentState?: SchedulerCurrentState; +} + +export interface SparkServicePlugin { + preparationStartedAt?: Date | null; + resourceAcquisitionStartedAt?: Date | null; + submissionStartedAt?: Date | null; + monitoringStartedAt?: Date | null; + cleanupStartedAt?: Date | null; + currentState?: PluginCurrentState; +} + +export interface SparkServiceError { + message?: string; + errorCode?: string; + source?: SparkErrorSource; +} + +export interface SparkBatchJobOptions { + /** + * Dictionary of + */ + tags?: { [propertyName: string]: string }; + artifactId?: string; + name: string; + file: string; + className?: string; + arguments?: string[]; + jars?: string[]; + pythonFiles?: string[]; + files?: string[]; + archives?: string[]; + /** + * Dictionary of + */ + configuration?: { [propertyName: string]: string }; + driverMemory?: string; + driverCores?: number; + executorMemory?: string; + executorCores?: number; + executorCount?: number; +} + +export interface SparkSessionCollection { + from: number; + total: number; + sessions?: SparkSession[]; +} + +export interface SparkSession { + livyInfo?: SparkSessionState; + name?: string; + workspaceName?: string; + sparkPoolName?: string; + submitterName?: string; + submitterId?: string; + artifactId?: string; + /** + * The job type. 
+ */ + jobType?: SparkJobType; + result?: SparkSessionResultType; + scheduler?: SparkScheduler; + plugin?: SparkServicePlugin; + /** + * The error information. + */ + errors?: SparkServiceError[]; + /** + * Dictionary of + */ + tags?: { [propertyName: string]: string }; + id: number; + appId?: string | null; + /** + * Dictionary of + */ + appInfo?: { [propertyName: string]: string } | null; + state?: string; + logLines?: string[] | null; +} + +export interface SparkSessionState { + notStartedAt?: Date | null; + startingAt?: Date | null; + idleAt?: Date | null; + deadAt?: Date | null; + shuttingDownAt?: Date | null; + /** + * the time that at which "killed" livy state was first seen. + */ + terminatedAt?: Date | null; + recoveringAt?: Date | null; + busyAt?: Date | null; + errorAt?: Date | null; + currentState?: string; + jobCreationRequest?: SparkRequest; +} + +export interface SparkSessionOptions { + /** + * Dictionary of + */ + tags?: { [propertyName: string]: string }; + artifactId?: string; + name: string; + file?: string; + className?: string; + arguments?: string[]; + jars?: string[]; + pythonFiles?: string[]; + files?: string[]; + archives?: string[]; + /** + * Dictionary of + */ + configuration?: { [propertyName: string]: string }; + driverMemory?: string; + driverCores?: number; + executorMemory?: string; + executorCores?: number; + executorCount?: number; +} + +export interface SparkStatementCollection { + total: number; + statements?: SparkStatement[]; +} + +export interface SparkStatement { + id: number; + code?: string; + state?: string; + output?: SparkStatementOutput; +} + +export interface SparkStatementOutput { + status?: string; + executionCount: number; + /** + * Any object + */ + data?: any; + errorName?: string; + errorValue?: string; + traceback?: string[]; +} + +export interface SparkStatementOptions { + code?: string; + kind?: SparkStatementLanguageType; +} + +export interface SparkStatementCancellationResult { + msg?: string; +} + +/** + * 
Known values of {@link SparkJobType} that the service accepts. + */ +export const enum KnownSparkJobType { + SparkBatch = "SparkBatch", + SparkSession = "SparkSession" +} + +/** + * Defines values for SparkJobType. \ + * {@link KnownSparkJobType} can be used interchangeably with SparkJobType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SparkBatch** \ + * **SparkSession** + */ +export type SparkJobType = string; + +/** + * Known values of {@link SparkBatchJobResultType} that the service accepts. + */ +export const enum KnownSparkBatchJobResultType { + Uncertain = "Uncertain", + Succeeded = "Succeeded", + Failed = "Failed", + Cancelled = "Cancelled" +} + +/** + * Defines values for SparkBatchJobResultType. \ + * {@link KnownSparkBatchJobResultType} can be used interchangeably with SparkBatchJobResultType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Uncertain** \ + * **Succeeded** \ + * **Failed** \ + * **Cancelled** + */ +export type SparkBatchJobResultType = string; + +/** + * Known values of {@link SchedulerCurrentState} that the service accepts. + */ +export const enum KnownSchedulerCurrentState { + Queued = "Queued", + Scheduled = "Scheduled", + Ended = "Ended" +} + +/** + * Defines values for SchedulerCurrentState. \ + * {@link KnownSchedulerCurrentState} can be used interchangeably with SchedulerCurrentState, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Queued** \ + * **Scheduled** \ + * **Ended** + */ +export type SchedulerCurrentState = string; + +/** + * Known values of {@link PluginCurrentState} that the service accepts. 
+ */ +export const enum KnownPluginCurrentState { + Preparation = "Preparation", + ResourceAcquisition = "ResourceAcquisition", + Queued = "Queued", + Submission = "Submission", + Monitoring = "Monitoring", + Cleanup = "Cleanup", + Ended = "Ended" +} + +/** + * Defines values for PluginCurrentState. \ + * {@link KnownPluginCurrentState} can be used interchangeably with PluginCurrentState, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Preparation** \ + * **ResourceAcquisition** \ + * **Queued** \ + * **Submission** \ + * **Monitoring** \ + * **Cleanup** \ + * **Ended** + */ +export type PluginCurrentState = string; + +/** + * Known values of {@link SparkErrorSource} that the service accepts. + */ +export const enum KnownSparkErrorSource { + System = "System", + User = "User", + Unknown = "Unknown", + Dependency = "Dependency" +} + +/** + * Defines values for SparkErrorSource. \ + * {@link KnownSparkErrorSource} can be used interchangeably with SparkErrorSource, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **System** \ + * **User** \ + * **Unknown** \ + * **Dependency** + */ +export type SparkErrorSource = string; + +/** + * Known values of {@link SparkSessionResultType} that the service accepts. + */ +export const enum KnownSparkSessionResultType { + Uncertain = "Uncertain", + Succeeded = "Succeeded", + Failed = "Failed", + Cancelled = "Cancelled" +} + +/** + * Defines values for SparkSessionResultType. \ + * {@link KnownSparkSessionResultType} can be used interchangeably with SparkSessionResultType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Uncertain** \ + * **Succeeded** \ + * **Failed** \ + * **Cancelled** + */ +export type SparkSessionResultType = string; + +/** + * Known values of {@link SparkStatementLanguageType} that the service accepts. 
+ */ +export const enum KnownSparkStatementLanguageType { + Spark = "spark", + Pyspark = "pyspark", + Dotnetspark = "dotnetspark", + Sql = "sql" +} + +/** + * Defines values for SparkStatementLanguageType. \ + * {@link KnownSparkStatementLanguageType} can be used interchangeably with SparkStatementLanguageType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **spark** \ + * **pyspark** \ + * **dotnetspark** \ + * **sql** + */ +export type SparkStatementLanguageType = string; + +/** + * Optional parameters. + */ +export interface SparkBatchGetSparkBatchJobsOptionalParams + extends coreHttp.OperationOptions { + /** + * Optional param specifying which index the list should begin from. + */ + fromParam?: number; + /** + * Optional param specifying the size of the returned list. + * By default it is 20 and that is the maximum. + */ + size?: number; + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the getSparkBatchJobs operation. + */ +export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkBatchJobCollection; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkBatchCreateSparkBatchJobOptionalParams + extends coreHttp.OperationOptions { + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the createSparkBatchJob operation. + */ +export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkBatchJob; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkBatchGetSparkBatchJobOptionalParams + extends coreHttp.OperationOptions { + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the getSparkBatchJob operation. + */ +export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkBatchJob; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkSessionGetSparkSessionsOptionalParams + extends coreHttp.OperationOptions { + /** + * Optional param specifying which index the list should begin from. + */ + fromParam?: number; + /** + * Optional param specifying the size of the returned list. + * By default it is 20 and that is the maximum. + */ + size?: number; + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the getSparkSessions operation. + */ +export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkSessionCollection; + }; +}; + +/** + * Optional parameters. 
+ */ +export interface SparkSessionCreateSparkSessionOptionalParams + extends coreHttp.OperationOptions { + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the createSparkSession operation. + */ +export type SparkSessionCreateSparkSessionResponse = SparkSession & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkSession; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkSessionGetSparkSessionOptionalParams + extends coreHttp.OperationOptions { + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the getSparkSession operation. + */ +export type SparkSessionGetSparkSessionResponse = SparkSession & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkSession; + }; +}; + +/** + * Contains response data for the getSparkStatements operation. + */ +export type SparkSessionGetSparkStatementsResponse = SparkStatementCollection & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkStatementCollection; + }; +}; + +/** + * Contains response data for the createSparkStatement operation. + */ +export type SparkSessionCreateSparkStatementResponse = SparkStatement & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkStatement; + }; +}; + +/** + * Contains response data for the getSparkStatement operation. + */ +export type SparkSessionGetSparkStatementResponse = SparkStatement & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkStatement; + }; +}; + +/** + * Contains response data for the cancelSparkStatement operation. + */ +export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellationResult & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkStatementCancellationResult; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkClientOptionalParams + extends coreHttp.ServiceClientOptions { + /** + * Valid api-version for the request. + */ + livyApiVersion?: string; + /** + * Overrides client endpoint. 
+ */ + endpoint?: string; +} diff --git a/sdk/synapse/synapse-spark/src/models/mappers.ts b/sdk/synapse/synapse-spark/src/models/mappers.ts new file mode 100644 index 000000000000..36d46edc918f --- /dev/null +++ b/sdk/synapse/synapse-spark/src/models/mappers.ts @@ -0,0 +1,1156 @@ +import * as coreHttp from "@azure/core-http"; + +export const SparkBatchJobCollection: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJobCollection", + modelProperties: { + from: { + serializedName: "from", + required: true, + type: { + name: "Number" + } + }, + total: { + serializedName: "total", + required: true, + type: { + name: "Number" + } + }, + sessions: { + serializedName: "sessions", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkBatchJob" + } + } + } + } + } + } +}; + +export const SparkBatchJob: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJob", + modelProperties: { + livyInfo: { + serializedName: "livyInfo", + type: { + name: "Composite", + className: "SparkBatchJobState" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + workspaceName: { + serializedName: "workspaceName", + type: { + name: "String" + } + }, + sparkPoolName: { + serializedName: "sparkPoolName", + type: { + name: "String" + } + }, + submitterName: { + serializedName: "submitterName", + type: { + name: "String" + } + }, + submitterId: { + serializedName: "submitterId", + type: { + name: "String" + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + jobType: { + serializedName: "jobType", + type: { + name: "String" + } + }, + result: { + serializedName: "result", + type: { + name: "String" + } + }, + scheduler: { + serializedName: "schedulerInfo", + type: { + name: "Composite", + className: "SparkScheduler" + } + }, + plugin: { + serializedName: "pluginInfo", + type: { + name: "Composite", + className: 
"SparkServicePlugin" + } + }, + errors: { + serializedName: "errorInfo", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkServiceError" + } + } + } + }, + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + id: { + serializedName: "id", + required: true, + type: { + name: "Number" + } + }, + appId: { + serializedName: "appId", + nullable: true, + type: { + name: "String" + } + }, + appInfo: { + serializedName: "appInfo", + nullable: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + state: { + serializedName: "state", + type: { + name: "String" + } + }, + logLines: { + serializedName: "log", + nullable: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const SparkBatchJobState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJobState", + modelProperties: { + notStartedAt: { + serializedName: "notStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + startingAt: { + serializedName: "startingAt", + nullable: true, + type: { + name: "DateTime" + } + }, + runningAt: { + serializedName: "runningAt", + nullable: true, + type: { + name: "DateTime" + } + }, + deadAt: { + serializedName: "deadAt", + nullable: true, + type: { + name: "DateTime" + } + }, + successAt: { + serializedName: "successAt", + nullable: true, + type: { + name: "DateTime" + } + }, + terminatedAt: { + serializedName: "killedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + recoveringAt: { + serializedName: "recoveringAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + }, + jobCreationRequest: { + serializedName: "jobCreationRequest", + type: { + name: "Composite", + className: "SparkRequest" + } + } + } + } +}; + +export const SparkRequest: 
coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkRequest", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, + file: { + serializedName: "file", + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + arguments: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + pythonFiles: { + serializedName: "pyFiles", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + configuration: { + serializedName: "conf", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + driverMemory: { + serializedName: "driverMemory", + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + type: { + name: "Number" + } + }, + executorCount: { + serializedName: "numExecutors", + type: { + name: "Number" + } + } + } + } +}; + +export const SparkScheduler: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkScheduler", + modelProperties: { + submittedAt: { + serializedName: "submittedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + scheduledAt: { + serializedName: "scheduledAt", + nullable: true, + type: { + name: "DateTime" + } + }, + endedAt: { + serializedName: "endedAt", + nullable: true, + type: { + name: 
"DateTime" + } + }, + cancellationRequestedAt: { + serializedName: "cancellationRequestedAt", + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + } + } + } +}; + +export const SparkServicePlugin: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkServicePlugin", + modelProperties: { + preparationStartedAt: { + serializedName: "preparationStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + resourceAcquisitionStartedAt: { + serializedName: "resourceAcquisitionStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + submissionStartedAt: { + serializedName: "submissionStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + monitoringStartedAt: { + serializedName: "monitoringStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + cleanupStartedAt: { + serializedName: "cleanupStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + } + } + } +}; + +export const SparkServiceError: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkServiceError", + modelProperties: { + message: { + serializedName: "message", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "errorCode", + type: { + name: "String" + } + }, + source: { + serializedName: "source", + type: { + name: "String" + } + } + } + } +}; + +export const SparkBatchJobOptions: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJobOptions", + modelProperties: { + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + file: { + serializedName: "file", + required: 
true, + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + arguments: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + pythonFiles: { + serializedName: "pyFiles", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + configuration: { + serializedName: "conf", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + driverMemory: { + serializedName: "driverMemory", + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + type: { + name: "Number" + } + }, + executorCount: { + serializedName: "numExecutors", + type: { + name: "Number" + } + } + } + } +}; + +export const SparkSessionCollection: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkSessionCollection", + modelProperties: { + from: { + serializedName: "from", + required: true, + type: { + name: "Number" + } + }, + total: { + serializedName: "total", + required: true, + type: { + name: "Number" + } + }, + sessions: { + serializedName: "sessions", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkSession" + } + } + } + } + } + } +}; + +export const SparkSession: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkSession", + modelProperties: { 
+ livyInfo: { + serializedName: "livyInfo", + type: { + name: "Composite", + className: "SparkSessionState" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + workspaceName: { + serializedName: "workspaceName", + type: { + name: "String" + } + }, + sparkPoolName: { + serializedName: "sparkPoolName", + type: { + name: "String" + } + }, + submitterName: { + serializedName: "submitterName", + type: { + name: "String" + } + }, + submitterId: { + serializedName: "submitterId", + type: { + name: "String" + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + jobType: { + serializedName: "jobType", + type: { + name: "String" + } + }, + result: { + serializedName: "result", + type: { + name: "String" + } + }, + scheduler: { + serializedName: "schedulerInfo", + type: { + name: "Composite", + className: "SparkScheduler" + } + }, + plugin: { + serializedName: "pluginInfo", + type: { + name: "Composite", + className: "SparkServicePlugin" + } + }, + errors: { + serializedName: "errorInfo", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkServiceError" + } + } + } + }, + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + id: { + serializedName: "id", + required: true, + type: { + name: "Number" + } + }, + appId: { + serializedName: "appId", + nullable: true, + type: { + name: "String" + } + }, + appInfo: { + serializedName: "appInfo", + nullable: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + state: { + serializedName: "state", + type: { + name: "String" + } + }, + logLines: { + serializedName: "log", + nullable: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const SparkSessionState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkSessionState", + modelProperties: { 
+ notStartedAt: { + serializedName: "notStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + startingAt: { + serializedName: "startingAt", + nullable: true, + type: { + name: "DateTime" + } + }, + idleAt: { + serializedName: "idleAt", + nullable: true, + type: { + name: "DateTime" + } + }, + deadAt: { + serializedName: "deadAt", + nullable: true, + type: { + name: "DateTime" + } + }, + shuttingDownAt: { + serializedName: "shuttingDownAt", + nullable: true, + type: { + name: "DateTime" + } + }, + terminatedAt: { + serializedName: "killedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + recoveringAt: { + serializedName: "recoveringAt", + nullable: true, + type: { + name: "DateTime" + } + }, + busyAt: { + serializedName: "busyAt", + nullable: true, + type: { + name: "DateTime" + } + }, + errorAt: { + serializedName: "errorAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + }, + jobCreationRequest: { + serializedName: "jobCreationRequest", + type: { + name: "Composite", + className: "SparkRequest" + } + } + } + } +}; + +export const SparkSessionOptions: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkSessionOptions", + modelProperties: { + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + file: { + serializedName: "file", + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + arguments: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + 
}, + pythonFiles: { + serializedName: "pyFiles", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + configuration: { + serializedName: "conf", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + driverMemory: { + serializedName: "driverMemory", + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + type: { + name: "Number" + } + }, + executorCount: { + serializedName: "numExecutors", + type: { + name: "Number" + } + } + } + } +}; + +export const SparkStatementCollection: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkStatementCollection", + modelProperties: { + total: { + serializedName: "total_statements", + required: true, + type: { + name: "Number" + } + }, + statements: { + serializedName: "statements", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkStatement" + } + } + } + } + } + } +}; + +export const SparkStatement: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkStatement", + modelProperties: { + id: { + serializedName: "id", + required: true, + type: { + name: "Number" + } + }, + code: { + serializedName: "code", + type: { + name: "String" + } + }, + state: { + serializedName: "state", + type: { + name: "String" + } + }, + output: { + serializedName: "output", + type: { + name: "Composite", + className: "SparkStatementOutput" + } + } + } + } +}; + +export const SparkStatementOutput: coreHttp.CompositeMapper = { + type: { 
+ name: "Composite", + className: "SparkStatementOutput", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + executionCount: { + serializedName: "execution_count", + required: true, + type: { + name: "Number" + } + }, + data: { + serializedName: "data", + type: { + name: "any" + } + }, + errorName: { + serializedName: "ename", + type: { + name: "String" + } + }, + errorValue: { + serializedName: "evalue", + type: { + name: "String" + } + }, + traceback: { + serializedName: "traceback", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const SparkStatementOptions: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkStatementOptions", + modelProperties: { + code: { + serializedName: "code", + type: { + name: "String" + } + }, + kind: { + serializedName: "kind", + type: { + name: "String" + } + } + } + } +}; + +export const SparkStatementCancellationResult: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkStatementCancellationResult", + modelProperties: { + msg: { + serializedName: "msg", + type: { + name: "String" + } + } + } + } +}; diff --git a/sdk/synapse/synapse-spark/src/models/parameters.ts b/sdk/synapse/synapse-spark/src/models/parameters.ts new file mode 100644 index 000000000000..38736a316780 --- /dev/null +++ b/sdk/synapse/synapse-spark/src/models/parameters.ts @@ -0,0 +1,148 @@ +import { + OperationParameter, + OperationURLParameter, + OperationQueryParameter +} from "@azure/core-http"; +import { + SparkBatchJobOptions as SparkBatchJobOptionsMapper, + SparkSessionOptions as SparkSessionOptionsMapper, + SparkStatementOptions as SparkStatementOptionsMapper +} from "../models/mappers"; + +export const accept: OperationParameter = { + parameterPath: "accept", + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; + 
+export const endpoint: OperationURLParameter = { + parameterPath: "endpoint", + mapper: { + serializedName: "endpoint", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const livyApiVersion: OperationURLParameter = { + parameterPath: "livyApiVersion", + mapper: { + serializedName: "livyApiVersion", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const sparkPoolName: OperationURLParameter = { + parameterPath: "sparkPoolName", + mapper: { + serializedName: "sparkPoolName", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const fromParam: OperationQueryParameter = { + parameterPath: ["options", "fromParam"], + mapper: { + serializedName: "from", + type: { + name: "Number" + } + } +}; + +export const size: OperationQueryParameter = { + parameterPath: ["options", "size"], + mapper: { + serializedName: "size", + type: { + name: "Number" + } + } +}; + +export const detailed: OperationQueryParameter = { + parameterPath: ["options", "detailed"], + mapper: { + serializedName: "detailed", + type: { + name: "Boolean" + } + } +}; + +export const contentType: OperationParameter = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; + +export const sparkBatchJobOptions: OperationParameter = { + parameterPath: "sparkBatchJobOptions", + mapper: SparkBatchJobOptionsMapper +}; + +export const batchId: OperationURLParameter = { + parameterPath: "batchId", + mapper: { + serializedName: "batchId", + required: true, + type: { + name: "Number" + } + } +}; + +export const sparkSessionOptions: OperationParameter = { + parameterPath: "sparkSessionOptions", + mapper: SparkSessionOptionsMapper +}; + +export const sessionId: OperationURLParameter = { + parameterPath: "sessionId", + mapper: { + serializedName: "sessionId", + required: true, + 
type: { + name: "Number" + } + } +}; + +export const sparkStatementOptions: OperationParameter = { + parameterPath: "sparkStatementOptions", + mapper: SparkStatementOptionsMapper +}; + +export const statementId: OperationURLParameter = { + parameterPath: "statementId", + mapper: { + serializedName: "statementId", + required: true, + type: { + name: "Number" + } + } +}; diff --git a/sdk/synapse/synapse-spark/src/operations/index.ts b/sdk/synapse/synapse-spark/src/operations/index.ts new file mode 100644 index 000000000000..f9fb9020b68f --- /dev/null +++ b/sdk/synapse/synapse-spark/src/operations/index.ts @@ -0,0 +1,2 @@ +export * from "./sparkBatch"; +export * from "./sparkSession"; diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts new file mode 100644 index 000000000000..d9d36b0a8b52 --- /dev/null +++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts @@ -0,0 +1,171 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { SparkClient } from "../sparkClient"; +import { + SparkBatchGetSparkBatchJobsOptionalParams, + SparkBatchGetSparkBatchJobsResponse, + SparkBatchJobOptions, + SparkBatchCreateSparkBatchJobOptionalParams, + SparkBatchCreateSparkBatchJobResponse, + SparkBatchGetSparkBatchJobOptionalParams, + SparkBatchGetSparkBatchJobResponse +} from "../models"; + +/** + * Class representing a SparkBatch. + */ +export class SparkBatch { + private readonly client: SparkClient; + + /** + * Initialize a new instance of the class SparkBatch class. + * @param client Reference to the service client + */ + constructor(client: SparkClient) { + this.client = client; + } + + /** + * List all spark batch jobs which are running under a particular spark pool. + * @param options The options parameters. 
+ */ + getSparkBatchJobs( + options?: SparkBatchGetSparkBatchJobsOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkBatchJobsOperationSpec + ) as Promise; + } + + /** + * Create new spark batch job. + * @param sparkBatchJobOptions Livy compatible batch job request payload. + * @param options The options parameters. + */ + createSparkBatchJob( + sparkBatchJobOptions: SparkBatchJobOptions, + options?: SparkBatchCreateSparkBatchJobOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sparkBatchJobOptions, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + createSparkBatchJobOperationSpec + ) as Promise; + } + + /** + * Gets a single spark batch job. + * @param batchId Identifier for the batch job. + * @param options The options parameters. + */ + getSparkBatchJob( + batchId: number, + options?: SparkBatchGetSparkBatchJobOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + batchId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkBatchJobOperationSpec + ) as Promise; + } + + /** + * Cancels a running spark batch job. + * @param batchId Identifier for the batch job. + * @param options The options parameters. 
+ */ + cancelSparkBatchJob( + batchId: number, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + batchId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + cancelSparkBatchJobOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getSparkBatchJobsOperationSpec: coreHttp.OperationSpec = { + path: "/batches", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkBatchJobCollection + } + }, + queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], + headerParameters: [Parameters.accept], + serializer +}; +const createSparkBatchJobOperationSpec: coreHttp.OperationSpec = { + path: "/batches", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.SparkBatchJob + } + }, + requestBody: Parameters.sparkBatchJobOptions, + queryParameters: [Parameters.detailed], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getSparkBatchJobOperationSpec: coreHttp.OperationSpec = { + path: "/batches/{batchId}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkBatchJob + } + }, + queryParameters: [Parameters.detailed], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.batchId + ], + headerParameters: [Parameters.accept], + serializer +}; +const cancelSparkBatchJobOperationSpec: coreHttp.OperationSpec = { + path: "/batches/{batchId}", + httpMethod: "DELETE", + responses: { 200: {} }, + urlParameters: [ + Parameters.endpoint, + 
Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.batchId + ], + serializer +}; diff --git a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts new file mode 100644 index 000000000000..844d4d4331ac --- /dev/null +++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts @@ -0,0 +1,364 @@ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { SparkClient } from "../sparkClient"; +import { + SparkSessionGetSparkSessionsOptionalParams, + SparkSessionGetSparkSessionsResponse, + SparkSessionOptions, + SparkSessionCreateSparkSessionOptionalParams, + SparkSessionCreateSparkSessionResponse, + SparkSessionGetSparkSessionOptionalParams, + SparkSessionGetSparkSessionResponse, + SparkSessionGetSparkStatementsResponse, + SparkStatementOptions, + SparkSessionCreateSparkStatementResponse, + SparkSessionGetSparkStatementResponse, + SparkSessionCancelSparkStatementResponse +} from "../models"; + +/** + * Class representing a SparkSession. + */ +export class SparkSession { + private readonly client: SparkClient; + + /** + * Initialize a new instance of the class SparkSession class. + * @param client Reference to the service client + */ + constructor(client: SparkClient) { + this.client = client; + } + + /** + * List all spark sessions which are running under a particular spark pool. + * @param options The options parameters. + */ + getSparkSessions( + options?: SparkSessionGetSparkSessionsOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkSessionsOperationSpec + ) as Promise; + } + + /** + * Create new spark session. + * @param sparkSessionOptions Livy compatible batch job request payload. 
+ * @param options The options parameters. + */ + createSparkSession( + sparkSessionOptions: SparkSessionOptions, + options?: SparkSessionCreateSparkSessionOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sparkSessionOptions, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + createSparkSessionOperationSpec + ) as Promise; + } + + /** + * Gets a single spark session. + * @param sessionId Identifier for the session. + * @param options The options parameters. + */ + getSparkSession( + sessionId: number, + options?: SparkSessionGetSparkSessionOptionalParams + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sessionId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkSessionOperationSpec + ) as Promise; + } + + /** + * Cancels a running spark session. + * @param sessionId Identifier for the session. + * @param options The options parameters. + */ + cancelSparkSession( + sessionId: number, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sessionId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + cancelSparkSessionOperationSpec + ) as Promise; + } + + /** + * Sends a keep alive call to the current session to reset the session timeout. + * @param sessionId Identifier for the session. + * @param options The options parameters. 
+ */ + resetSparkSessionTimeout( + sessionId: number, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sessionId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + resetSparkSessionTimeoutOperationSpec + ) as Promise; + } + + /** + * Gets a list of statements within a spark session. + * @param sessionId Identifier for the session. + * @param options The options parameters. + */ + getSparkStatements( + sessionId: number, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sessionId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkStatementsOperationSpec + ) as Promise; + } + + /** + * Create statement within a spark session. + * @param sessionId Identifier for the session. + * @param sparkStatementOptions Livy compatible batch job request payload. + * @param options The options parameters. + */ + createSparkStatement( + sessionId: number, + sparkStatementOptions: SparkStatementOptions, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sessionId, + sparkStatementOptions, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + createSparkStatementOperationSpec + ) as Promise; + } + + /** + * Gets a single statement within a spark session. + * @param sessionId Identifier for the session. + * @param statementId Identifier for the statement. + * @param options The options parameters. 
+ */ + getSparkStatement( + sessionId: number, + statementId: number, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sessionId, + statementId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + getSparkStatementOperationSpec + ) as Promise; + } + + /** + * Kill a statement within a session. + * @param sessionId Identifier for the session. + * @param statementId Identifier for the statement. + * @param options The options parameters. + */ + cancelSparkStatement( + sessionId: number, + statementId: number, + options?: coreHttp.OperationOptions + ): Promise { + const operationArguments: coreHttp.OperationArguments = { + sessionId, + statementId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest( + operationArguments, + cancelSparkStatementOperationSpec + ) as Promise; + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getSparkSessionsOperationSpec: coreHttp.OperationSpec = { + path: "/sessions", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkSessionCollection + } + }, + queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], + headerParameters: [Parameters.accept], + serializer +}; +const createSparkSessionOperationSpec: coreHttp.OperationSpec = { + path: "/sessions", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.SparkSession + } + }, + requestBody: Parameters.sparkSessionOptions, + queryParameters: [Parameters.detailed], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: 
"json", + serializer +}; +const getSparkSessionOperationSpec: coreHttp.OperationSpec = { + path: "/sessions/{sessionId}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkSession + } + }, + queryParameters: [Parameters.detailed], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.sessionId + ], + headerParameters: [Parameters.accept], + serializer +}; +const cancelSparkSessionOperationSpec: coreHttp.OperationSpec = { + path: "/sessions/{sessionId}", + httpMethod: "DELETE", + responses: { 200: {} }, + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.sessionId + ], + serializer +}; +const resetSparkSessionTimeoutOperationSpec: coreHttp.OperationSpec = { + path: "/sessions/{sessionId}/reset-timeout", + httpMethod: "PUT", + responses: { 200: {} }, + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.sessionId + ], + serializer +}; +const getSparkStatementsOperationSpec: coreHttp.OperationSpec = { + path: "/sessions/{sessionId}/statements", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkStatementCollection + } + }, + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.sessionId + ], + headerParameters: [Parameters.accept], + serializer +}; +const createSparkStatementOperationSpec: coreHttp.OperationSpec = { + path: "/sessions/{sessionId}/statements", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.SparkStatement + } + }, + requestBody: Parameters.sparkStatementOptions, + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.sessionId + ], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getSparkStatementOperationSpec: coreHttp.OperationSpec = { + path: 
"/sessions/{sessionId}/statements/{statementId}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkStatement + } + }, + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.sessionId, + Parameters.statementId + ], + headerParameters: [Parameters.accept], + serializer +}; +const cancelSparkStatementOperationSpec: coreHttp.OperationSpec = { + path: "/sessions/{sessionId}/statements/{statementId}/cancel", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.SparkStatementCancellationResult + } + }, + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.sessionId, + Parameters.statementId + ], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-spark/src/sparkClient.ts b/sdk/synapse/synapse-spark/src/sparkClient.ts new file mode 100644 index 000000000000..3dbe2ecb1898 --- /dev/null +++ b/sdk/synapse/synapse-spark/src/sparkClient.ts @@ -0,0 +1,28 @@ +import * as coreHttp from "@azure/core-http"; +import { SparkBatch, SparkSession } from "./operations"; +import { SparkClientContext } from "./sparkClientContext"; +import { SparkClientOptionalParams } from "./models"; + +export class SparkClient extends SparkClientContext { + /** + * Initializes a new instance of the SparkClient class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param sparkPoolName Name of the spark pool. 
+ * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + sparkPoolName: string, + options?: SparkClientOptionalParams + ) { + super(credentials, endpoint, sparkPoolName, options); + this.sparkBatch = new SparkBatch(this); + this.sparkSession = new SparkSession(this); + } + + sparkBatch: SparkBatch; + sparkSession: SparkSession; +} diff --git a/sdk/synapse/synapse-spark/src/sparkClientContext.ts b/sdk/synapse/synapse-spark/src/sparkClientContext.ts new file mode 100644 index 000000000000..52587e47d3ef --- /dev/null +++ b/sdk/synapse/synapse-spark/src/sparkClientContext.ts @@ -0,0 +1,61 @@ +import * as coreHttp from "@azure/core-http"; +import { SparkClientOptionalParams } from "./models"; + +const packageName = "@azure/synapse-spark"; +const packageVersion = "1.0.0"; + +export class SparkClientContext extends coreHttp.ServiceClient { + endpoint: string; + livyApiVersion: string; + sparkPoolName: string; + + /** + * Initializes a new instance of the SparkClientContext class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param sparkPoolName Name of the spark pool. 
+ * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + sparkPoolName: string, + options?: SparkClientOptionalParams + ) { + if (credentials === undefined) { + throw new Error("'credentials' cannot be null"); + } + if (endpoint === undefined) { + throw new Error("'endpoint' cannot be null"); + } + if (sparkPoolName === undefined) { + throw new Error("'sparkPoolName' cannot be null"); + } + + // Initializing default values for options + if (!options) { + options = {}; + } + + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + + super(credentials, options); + + this.requestContentType = "application/json; charset=utf-8"; + + this.baseUri = + options.endpoint || + "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; + + // Parameter assignments + this.endpoint = endpoint; + this.sparkPoolName = sparkPoolName; + + // Assigning values to Constant parameters + this.livyApiVersion = options.livyApiVersion || "2019-11-01-preview"; + } +} diff --git a/sdk/synapse/synapse-spark/tsconfig.json b/sdk/synapse/synapse-spark/tsconfig.json new file mode 100644 index 000000000000..0290d6707a44 --- /dev/null +++ b/sdk/synapse/synapse-spark/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "es6", + "moduleResolution": "node", + "strict": true, + "target": "es5", + "sourceMap": true, + "declarationMap": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "preserveConstEnums": true, + "lib": ["es6", "dom"], + "declaration": true, + "outDir": "./esm", + "importHelpers": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +} From cbaf56348ac6151ead82c3b9e4837c2551d1bd4b Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Mon, 30 Nov 2020 
13:59:23 +1300 Subject: [PATCH 03/28] WIP --- sdk/synapse/synapse-accesscontrol/README.md | 3 +- .../synapse-accesscontrol/package.json | 12 +++- .../src/accessControlClient.ts | 6 +- .../synapse-accesscontrol/src/models/index.ts | 3 +- sdk/synapse/synapse-artifacts/README.md | 3 +- sdk/synapse/synapse-artifacts/package.json | 12 +++- .../src/lro/azureAsyncOperationStrategy.ts | 42 +++-------- .../src/lro/bodyPollingStrategy.ts | 5 +- .../src/lro/locationStrategy.ts | 7 +- .../synapse-artifacts/src/lro/lroPolicy.ts | 8 +-- .../synapse-artifacts/src/lro/lroPoller.ts | 27 ++------ .../synapse-artifacts/src/lro/models.ts | 8 +-- .../synapse-artifacts/src/lro/operation.ts | 4 +- .../synapse-artifacts/src/lro/requestUtils.ts | 16 +---- .../synapse-artifacts/src/models/index.ts | 69 ++++++------------- .../synapse-artifacts/src/models/mappers.ts | 46 ++++--------- .../src/operations/bigDataPools.ts | 14 ++-- .../src/operations/dataFlow.ts | 34 ++------- .../src/operations/dataFlowDebugSession.ts | 39 +++-------- .../src/operations/dataset.ts | 41 +++-------- .../src/operations/integrationRuntimes.ts | 23 +++---- .../src/operations/linkedService.ts | 38 +++------- .../src/operations/notebook.ts | 43 +++--------- .../src/operations/pipeline.ts | 34 ++------- .../src/operations/pipelineRun.ts | 6 +- .../src/operations/sparkJobDefinition.ts | 38 +++------- .../src/operations/sqlPools.ts | 19 ++--- .../src/operations/sqlScript.ts | 20 ++---- .../src/operations/trigger.ts | 60 ++++------------ .../src/operations/triggerRun.ts | 17 +---- .../src/operations/workspace.ts | 7 +- .../operations/workspaceGitRepoManagement.ts | 6 +- .../synapse-managed-endpoints/README.md | 3 +- .../synapse-managed-endpoints/package.json | 12 +++- .../src/models/index.ts | 3 +- .../src/operations/managedPrivateEndpoints.ts | 55 +++++---------- sdk/synapse/synapse-monitoring/README.md | 3 +- sdk/synapse/synapse-monitoring/package.json | 17 ++++- .../synapse-monitoring/src/models/index.ts | 9 +-- 
.../src/operations/monitoring.ts | 7 +- sdk/synapse/synapse-spark/README.md | 3 +- sdk/synapse/synapse-spark/package.json | 17 ++++- sdk/synapse/synapse-spark/src/models/index.ts | 21 ++---- .../src/operations/sparkBatch.ts | 12 +--- .../src/operations/sparkSession.ts | 12 +--- .../synapse-spark/src/sparkClientContext.ts | 3 +- 46 files changed, 264 insertions(+), 623 deletions(-) diff --git a/sdk/synapse/synapse-accesscontrol/README.md b/sdk/synapse/synapse-accesscontrol/README.md index 5da1483fc507..d85737af8843 100644 --- a/sdk/synapse/synapse-accesscontrol/README.md +++ b/sdk/synapse/synapse-accesscontrol/README.md @@ -23,5 +23,4 @@ Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) - -![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) diff --git a/sdk/synapse/synapse-accesscontrol/package.json b/sdk/synapse/synapse-accesscontrol/package.json index 665dac753508..e807162d24b7 100644 --- a/sdk/synapse/synapse-accesscontrol/package.json +++ b/sdk/synapse/synapse-accesscontrol/package.json @@ -8,7 +8,13 @@ "@azure/core-http": "^1.2.0", "tslib": "^2.0.0" }, - "keywords": ["node", "azure", "typescript", "browser", "isomorphic"], + "keywords": [ + "node", + "azure", + "typescript", + "browser", + "isomorphic" + ], "license": "MIT", "main": "./dist/synapse-accesscontrol.js", "module": "./esm/index.js", @@ -26,7 +32,9 @@ "type": "git", "url": "https://github.com/Azure/azure-sdk-for-js.git" }, - "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, "files": [ "dist/**/*.js", 
"dist/**/*.js.map", diff --git a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts index 0a7ff0c8a2e3..61aeccd43e68 100644 --- a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts +++ b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts @@ -291,11 +291,7 @@ const getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [ - Parameters.apiVersion, - Parameters.roleId1, - Parameters.principalId - ], + queryParameters: [Parameters.apiVersion, Parameters.roleId1, Parameters.principalId], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.continuationToken], serializer diff --git a/sdk/synapse/synapse-accesscontrol/src/models/index.ts b/sdk/synapse/synapse-accesscontrol/src/models/index.ts index e651fb487f46..8dc93686ed46 100644 --- a/sdk/synapse/synapse-accesscontrol/src/models/index.ts +++ b/sdk/synapse/synapse-accesscontrol/src/models/index.ts @@ -269,8 +269,7 @@ export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListRespons /** * Optional parameters. 
*/ -export interface AccessControlClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/README.md b/sdk/synapse/synapse-artifacts/README.md index 165d8e954429..e41467ded6dc 100644 --- a/sdk/synapse/synapse-artifacts/README.md +++ b/sdk/synapse/synapse-artifacts/README.md @@ -23,5 +23,4 @@ Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) - -![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index eac600ab121c..56d45b88fb53 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -9,7 +9,13 @@ "@azure/core-http": "^1.2.0", "tslib": "^2.0.0" }, - "keywords": ["node", "azure", "typescript", "browser", "isomorphic"], + "keywords": [ + "node", + "azure", + "typescript", + "browser", + "isomorphic" + ], "license": "MIT", "main": "./dist/synapse-artifacts.js", "module": "./esm/index.js", @@ -27,7 +33,9 @@ "type": "git", "url": "https://github.com/Azure/azure-sdk-for-js.git" }, - "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, "files": [ "dist/**/*.js", "dist/**/*.js.map", diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts index 0b5d4232590f..3c2fd2e5d6c9 100644 --- 
a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -6,11 +6,7 @@ import { FinalStateVia, LROSYM } from "./models"; -import { - OperationSpec, - OperationArguments, - OperationResponse -} from "@azure/core-http"; +import { OperationSpec, OperationArguments, OperationResponse } from "@azure/core-http"; import { terminalStates } from "./constants"; import { SendOperationFn } from "."; @@ -21,14 +17,11 @@ export function createAzureAsyncOperationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; - let lastKnownPollingUrl = - lroData.azureAsyncOperation || lroData.operationLocation; + let lastKnownPollingUrl = lroData.azureAsyncOperation || lroData.operationLocation; return { isTerminal: () => { @@ -59,17 +52,12 @@ export function createAzureAsyncOperationStrategy( const initialOperationResult = initialOperation.result._response[LROSYM]; const currentOperationResult = currentOperation.result._response[LROSYM]; - if ( - !shouldPerformFinalGet(initialOperationResult, currentOperationResult) - ) { + if (!shouldPerformFinalGet(initialOperationResult, currentOperationResult)) { return currentOperation; } if (initialOperationResult?.requestMethod === "PUT") { - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); return currentOperation; } @@ -77,29 +65,20 @@ export function createAzureAsyncOperationStrategy( if (initialOperationResult?.location) { switch (finalStateVia) { case "original-uri": - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await 
sendFinalGet(initialOperation, sendOperationFn); return currentOperation; case "azure-async-operation": return currentOperation; case "location": default: - const location = - initialOperationResult.location || - currentOperationResult?.location; + const location = initialOperationResult.location || currentOperationResult?.location; if (!location) { throw new Error("Couldn't determine final GET URL from location"); } - return await sendFinalGet( - initialOperation, - sendOperationFn, - location - ); + return await sendFinalGet(initialOperation, sendOperationFn, location); } } @@ -177,10 +156,7 @@ function getCompositeMappers(responses: { }, {} as { [responseCode: string]: OperationResponse }); } -function shouldPerformFinalGet( - initialResult?: LROResponseInfo, - currentResult?: LROResponseInfo -) { +function shouldPerformFinalGet(initialResult?: LROResponseInfo, currentResult?: LROResponseInfo) { const { status } = currentResult || {}; const { requestMethod: initialRequestMethod, location } = initialResult || {}; if (status && status.toLowerCase() !== "succeeded") { diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts index 62ed188e691e..49333c25b430 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -44,10 +44,7 @@ export function createBodyPollingStrategy( }; // Execute the polling operation - initialOperation.result = await sendOperation( - initialOperation.args, - pollingSpec - ); + initialOperation.result = await sendOperation(initialOperation.args, pollingSpec); return initialOperation; } }; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts index cfcfa8efd0a7..008f60177503 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts +++ 
b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -8,9 +8,7 @@ export function createLocationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; @@ -51,8 +49,7 @@ export function createLocationStrategy( const result = await sendOperationFn(pollingArgs, pollingSpec); // Update latest polling url - lastKnownPollingUrl = - result._response[LROSYM]?.location || lastKnownPollingUrl; + lastKnownPollingUrl = result._response[LROSYM]?.location || lastKnownPollingUrl; // Update lastOperation result currentOperation = { diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index e686401a5cf9..1e1ec61db3b3 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -21,12 +21,8 @@ class LROPolicy extends BaseRequestPolicy { super(nextPolicy, options); } - public async sendRequest( - webResource: WebResource - ): Promise { - let result: LROOperationResponse = await this._nextPolicy.sendRequest( - webResource - ); + public async sendRequest(webResource: WebResource): Promise { + let result: LROOperationResponse = await this._nextPolicy.sendRequest(webResource); const _lroData = getLROData(result); result[LROSYM] = _lroData; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts index 72d979bd46a9..9ab5f25fb49a 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -1,17 +1,6 @@ import { Poller } from "@azure/core-lro"; -import { - OperationSpec, - OperationArguments, - delay, - RestError -} from "@azure/core-http"; -import { - BaseResult, - 
LROOperationState, - LROOperationStep, - FinalStateVia, - LROSYM -} from "./models"; +import { OperationSpec, OperationArguments, delay, RestError } from "@azure/core-http"; +import { BaseResult, LROOperationState, LROOperationStep, FinalStateVia, LROSYM } from "./models"; import { makeOperation } from "./operation"; import { createBodyPollingStrategy } from "./bodyPollingStrategy"; import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; @@ -70,11 +59,7 @@ export class LROPoller extends Poller< result: initialOperationResult }; - const pollingStrategy = getPollingStrategy( - initialOperation, - sendOperation, - finalStateVia - ); + const pollingStrategy = getPollingStrategy(initialOperation, sendOperation, finalStateVia); const state: LROOperationState = { // Initial operation will become the last operation @@ -127,11 +112,7 @@ function getPollingStrategy( } if (lroData.azureAsyncOperation || lroData.operationLocation) { - return createAzureAsyncOperationStrategy( - initialOperation, - sendOperationFn, - finalStateVia - ); + return createAzureAsyncOperationStrategy(initialOperation, sendOperationFn, finalStateVia); } if (lroData.location) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts index ed0dd9132876..704c731cf370 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/models.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -8,10 +8,7 @@ import { import { PollOperationState, PollOperation } from "@azure/core-lro"; export const LROSYM = Symbol("LROData"); -export type FinalStateVia = - | "azure-async-operation" - | "location" - | "original-uri"; +export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; export interface LROResponseInfo { requestMethod: HttpMethods; @@ -47,8 +44,7 @@ export interface LROOperationStep { result: TResult; } -export interface LROOperationState - extends PollOperationState { +export interface LROOperationState 
extends PollOperationState { lastOperation: LROOperationStep; initialOperation: LROOperationStep; pollingStrategy: LROStrategy; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts index 9b37277e7b2a..e1e3fa2e1126 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/operation.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -41,9 +41,7 @@ async function update( const currentLroData = currentResponse.result._response[LROSYM]; if (!currentLroData) { - throw new Error( - "Expected lroData to be defined for updating LRO operation" - ); + throw new Error("Expected lroData to be defined for updating LRO operation"); } if (state.result) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts index e3289b95905b..3c518804edec 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -21,17 +21,10 @@ export function shouldDeserializeLRO(finalStateVia?: string) { isInitialRequest = false; } - if ( - initialOperationInfo.azureAsyncOperation || - initialOperationInfo.operationLocation - ) { + if (initialOperationInfo.azureAsyncOperation || initialOperationInfo.operationLocation) { return ( !isInitialRequest && - isAsyncOperationFinalResponse( - response, - initialOperationInfo, - finalStateVia - ) + isAsyncOperationFinalResponse(response, initialOperationInfo, finalStateVia) ); } @@ -69,10 +62,7 @@ function isAsyncOperationFinalResponse( return true; } - if ( - initialOperationInfo.requestMethod !== "PUT" && - !initialOperationInfo.location - ) { + if (initialOperationInfo.requestMethod !== "PUT" && !initialOperationInfo.location) { return true; } diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index 48df2ee3a27a..5389dc744b1e 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ 
b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -201,9 +201,7 @@ export type TriggerUnion = | TumblingWindowTrigger | ChainingTrigger; export type DataFlowUnion = MappingDataFlow; -export type IntegrationRuntimeUnion = - | ManagedIntegrationRuntime - | SelfHostedIntegrationRuntime; +export type IntegrationRuntimeUnion = ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; export type SecretBaseUnion = SecureString | AzureKeyVaultSecretReference; export type DatasetLocationUnion = | AzureBlobStorageLocation @@ -352,10 +350,7 @@ export type ExecutionActivityUnion = | DatabricksSparkPythonActivity | AzureFunctionActivity | ExecuteDataFlowActivity; -export type MultiplePipelineTriggerUnion = - | ScheduleTrigger - | BlobTrigger - | BlobEventsTrigger; +export type MultiplePipelineTriggerUnion = ScheduleTrigger | BlobTrigger | BlobEventsTrigger; export type TabularSourceUnion = | AzureTableSource | InformixSource @@ -3134,12 +3129,7 @@ export interface DatasetStorageFormat { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "TextFormat" - | "JsonFormat" - | "AvroFormat" - | "OrcFormat" - | "ParquetFormat"; + type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -3329,10 +3319,7 @@ export interface FormatWriteSettings { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "AvroWriteSettings" - | "DelimitedTextWriteSettings" - | "JsonWriteSettings"; + type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -14607,8 +14594,7 @@ export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceReso /** * Optional parameters. 
*/ -export interface LinkedServiceGetLinkedServiceOptionalParams - extends coreHttp.OperationOptions { +export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { /** * ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14678,8 +14664,7 @@ export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { /** * Optional parameters. */ -export interface DatasetCreateOrUpdateDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -14713,8 +14698,7 @@ export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { /** * Optional parameters. */ -export interface DatasetGetDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14784,8 +14768,7 @@ export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { /** * Optional parameters. */ -export interface PipelineCreateOrUpdatePipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -14819,8 +14802,7 @@ export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { /** * Optional parameters. 
*/ -export interface PipelineGetPipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14850,8 +14832,7 @@ export type PipelineGetPipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineCreatePipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { /** * Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */ @@ -14973,8 +14954,7 @@ export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { /** * Optional parameters. */ -export interface PipelineRunCancelPipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { /** * If true, cancel all the Child pipelines that are triggered by the current pipeline. */ @@ -15004,8 +14984,7 @@ export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { /** * Optional parameters. */ -export interface TriggerCreateOrUpdateTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15039,8 +15018,7 @@ export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { /** * Optional parameters. 
*/ -export interface TriggerGetTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15178,8 +15156,7 @@ export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResp /** * Optional parameters. */ -export interface DataFlowCreateOrUpdateDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15213,8 +15190,7 @@ export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { /** * Optional parameters. */ -export interface DataFlowGetDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15412,8 +15388,7 @@ export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & /** * Optional parameters. */ -export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { /** * ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15443,8 +15418,7 @@ export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { /** * Optional parameters. 
*/ -export interface SqlScriptGetSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { /** * ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15684,8 +15658,7 @@ export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse /** * Optional parameters. */ -export interface NotebookCreateOrUpdateNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { /** * ETag of the Note book entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15719,8 +15692,7 @@ export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { /** * Optional parameters. */ -export interface NotebookGetNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { /** * ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15961,8 +15933,7 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces /** * Optional parameters. 
*/ -export interface ArtifactsClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index 6335e25515f9..8132ad51a281 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -14802,8 +14802,7 @@ export const TextFormat: coreHttp.CompositeMapper = { className: "TextFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, columnDelimiter: { @@ -14871,8 +14870,7 @@ export const JsonFormat: coreHttp.CompositeMapper = { className: "JsonFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, filePattern: { @@ -14916,8 +14914,7 @@ export const AvroFormat: coreHttp.CompositeMapper = { className: "AvroFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14931,8 +14928,7 @@ export const OrcFormat: coreHttp.CompositeMapper = { className: "OrcFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - 
DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14946,8 +14942,7 @@ export const ParquetFormat: coreHttp.CompositeMapper = { className: "ParquetFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -15034,8 +15029,7 @@ export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebAnonymousAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties } @@ -15048,8 +15042,7 @@ export const WebBasicAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebBasicAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, username: { @@ -15076,8 +15069,7 @@ export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebClientCertificateAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { 
...WebLinkedServiceTypeProperties.type.modelProperties, pfx: { @@ -17691,9 +17683,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap ...DependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -17705,9 +17695,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap }, size: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -17726,8 +17714,7 @@ export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeKeyAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, key: { @@ -17747,8 +17734,7 @@ export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeRbacAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, resourceId: { @@ -21054,9 +21040,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper ...TriggerDependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new 
RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -21067,9 +21051,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper }, size: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index 977767e2578f..3eab86fcd719 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -26,10 +26,9 @@ export class BigDataPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + BigDataPoolsListResponse + >; } /** @@ -45,10 +44,9 @@ export class BigDataPools { bigDataPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + BigDataPoolsGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index 3dffca96470b..010e6f89c007 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -57,10 +57,7 @@ export class DataFlow { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await 
this._getDataFlowsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDataFlowsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -90,10 +87,7 @@ export class DataFlow { dataFlow, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowCreateOrUpdateDataFlowResponse >; @@ -143,13 +137,8 @@ export class DataFlow { dataFlowName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -180,13 +169,8 @@ export class DataFlow { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -276,11 +260,7 @@ const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataFlow, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - 
Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index f0e17e80dafd..f84fba997bfa 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -59,10 +59,7 @@ export class DataFlowDebugSession { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._queryDataFlowDebugSessionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._queryDataFlowDebugSessionsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -71,9 +68,7 @@ export class DataFlowDebugSession { private async *queryDataFlowDebugSessionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage( - options - )) { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage(options)) { yield* page; } } @@ -86,17 +81,12 @@ export class DataFlowDebugSession { async createDataFlowDebugSession( request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions - ): Promise< - LROPoller - > { + ): Promise> { const operationArguments: coreHttp.OperationArguments = { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowDebugSessionCreateDataFlowDebugSessionResponse >; @@ -120,18 +110,14 @@ export class 
DataFlowDebugSession { */ private _queryDataFlowDebugSessionsByWorkspace( options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse - > { + ): Promise { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest( operationArguments, queryDataFlowDebugSessionsByWorkspaceOperationSpec - ) as Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse - >; + ) as Promise; } /** @@ -185,10 +171,7 @@ export class DataFlowDebugSession { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowDebugSessionExecuteCommandResponse >; @@ -215,9 +198,7 @@ export class DataFlowDebugSession { private _queryDataFlowDebugSessionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse - > { + ): Promise { const operationArguments: coreHttp.OperationArguments = { nextLink, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -225,9 +206,7 @@ export class DataFlowDebugSession { return this.client.sendOperationRequest( operationArguments, queryDataFlowDebugSessionsByWorkspaceNextOperationSpec - ) as Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse - >; + ) as Promise; } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 0cef4925e7c0..294e0e457bd6 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ 
b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -57,10 +57,7 @@ export class Dataset { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDatasetsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDatasetsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -106,10 +103,7 @@ export class Dataset { dataset, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DatasetCreateOrUpdateDatasetResponse >; @@ -140,10 +134,9 @@ export class Dataset { datasetName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getDatasetOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getDatasetOperationSpec) as Promise< + DatasetGetDatasetResponse + >; } /** @@ -159,13 +152,8 @@ export class Dataset { datasetName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -196,13 +184,8 @@ export class Dataset { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return 
this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -291,11 +274,7 @@ const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataset, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.datasetName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index 4736ccb3746a..9a8962a761fc 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -2,10 +2,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { - IntegrationRuntimesListResponse, - IntegrationRuntimesGetResponse -} from "../models"; +import { IntegrationRuntimesListResponse, IntegrationRuntimesGetResponse } from "../models"; /** * Class representing a IntegrationRuntimes. @@ -25,16 +22,13 @@ export class IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. 
*/ - list( - options?: coreHttp.OperationOptions - ): Promise { + list(options?: coreHttp.OperationOptions): Promise { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + IntegrationRuntimesListResponse + >; } /** @@ -50,10 +44,9 @@ export class IntegrationRuntimes { integrationRuntimeName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + IntegrationRuntimesGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index 35436c92ba04..644f8913310e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -57,10 +57,7 @@ export class LinkedService { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getLinkedServicesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getLinkedServicesByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -69,9 +66,7 @@ export class LinkedService { private async *getLinkedServicesByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getLinkedServicesByWorkspacePagingPage( - options - )) { + for await (const page of this.getLinkedServicesByWorkspacePagingPage(options)) { yield* page; } } @@ -108,10 +103,7 @@ export class 
LinkedService { linkedService, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< LinkedServiceCreateOrUpdateLinkedServiceResponse >; @@ -161,13 +153,8 @@ export class LinkedService { linkedServiceName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -198,13 +185,8 @@ export class LinkedService { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -294,11 +276,7 @@ const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.linkedService, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git 
a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index df003d827a91..6bc39f71c116 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -59,10 +59,7 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebooksByWorkspaceNext( - continuationToken, - options - ); + result = await this._getNotebooksByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -104,10 +101,7 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebookSummaryByWorkSpaceNext( - continuationToken, - options - ); + result = await this._getNotebookSummaryByWorkSpaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -116,9 +110,7 @@ export class Notebook { private async *getNotebookSummaryByWorkSpacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( - options - )) { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage(options)) { yield* page; } } @@ -171,10 +163,7 @@ export class Notebook { notebook, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< NotebookCreateOrUpdateNotebookResponse >; @@ -224,13 +213,8 @@ export class Notebook { notebookName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: 
coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -261,13 +245,8 @@ export class Notebook { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -393,11 +372,7 @@ const createOrUpdateNotebookOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.notebook, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.notebookName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index 87c49bd6c9ae..b481a9d165eb 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -59,10 +59,7 @@ export class Pipeline { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getPipelinesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getPipelinesByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -108,10 +105,7 @@ 
export class Pipeline { pipeline, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< PipelineCreateOrUpdatePipelineResponse >; @@ -161,13 +155,8 @@ export class Pipeline { pipelineName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -198,13 +187,8 @@ export class Pipeline { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -313,11 +297,7 @@ const createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.pipeline, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.pipelineName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts 
b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index 9c2c3370e5cc..240337eb9067 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -157,11 +157,7 @@ const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.filterParameters, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.pipelineName, - Parameters.runId - ], + urlParameters: [Parameters.endpoint, Parameters.pipelineName, Parameters.runId], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index 0604bdeeaf1b..b7510b8e133f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -59,10 +59,7 @@ export class SparkJobDefinition { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSparkJobDefinitionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -71,9 +68,7 @@ export class SparkJobDefinition { private async *getSparkJobDefinitionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( - options - )) { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage(options)) { yield* page; } } @@ -167,10 +162,7 @@ export class SparkJobDefinition { sparkJobDefinitionName, options: this.getOperationOptions(options, "location") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: 
coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionExecuteSparkJobDefinitionResponse >; @@ -205,13 +197,8 @@ export class SparkJobDefinition { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -239,10 +226,7 @@ export class SparkJobDefinition { sparkJobDefinitionAzureResource, options: this.getOperationOptions(options, "location") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionDebugSparkJobDefinitionResponse >; @@ -278,9 +262,7 @@ export class SparkJobDefinition { return this.client.sendOperationRequest( operationArguments, getSparkJobDefinitionsByWorkspaceNextOperationSpec - ) as Promise< - SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse - >; + ) as Promise; } private getOperationOptions( @@ -329,11 +311,7 @@ const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sparkJobDefinition, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: 
"json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index dc58292e2873..375608469931 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -26,10 +26,9 @@ export class SqlPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + SqlPoolsListResponse + >; } /** @@ -37,18 +36,14 @@ export class SqlPools { * @param sqlPoolName The Sql Pool name * @param options The options parameters. */ - get( - sqlPoolName: string, - options?: coreHttp.OperationOptions - ): Promise { + get(sqlPoolName: string, options?: coreHttp.OperationOptions): Promise { const operationArguments: coreHttp.OperationArguments = { sqlPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + SqlPoolsGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index d3e08c1f3ea1..8db16a32af20 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -57,10 +57,7 @@ export class SqlScript { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSqlScriptsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getSqlScriptsByWorkspaceNext(continuationToken, 
options); continuationToken = result.nextLink; yield result.value || []; } @@ -166,13 +163,8 @@ export class SqlScript { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -253,11 +245,7 @@ const createOrUpdateSqlScriptOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sqlScript, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index 4f333b059259..c9ae4ffdeb84 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -59,10 +59,7 @@ export class Trigger { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getTriggersByWorkspaceNext( - continuationToken, - options - ); + result = await this._getTriggersByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -108,10 +105,7 @@ export class Trigger { trigger, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: 
coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerCreateOrUpdateTriggerResponse >; @@ -142,10 +136,9 @@ export class Trigger { triggerName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getTriggerOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getTriggerOperationSpec) as Promise< + TriggerGetTriggerResponse + >; } /** @@ -161,13 +154,8 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -195,10 +183,7 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerSubscribeTriggerToEventsResponse >; @@ -248,10 +233,7 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerUnsubscribeTriggerFromEventsResponse >; @@ -282,13 +264,8 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; 
- const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -316,13 +293,8 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -411,11 +383,7 @@ const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.trigger, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.triggerName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index 20e37cfac269..f64442937aa7 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -2,10 +2,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { - RunFilterParameters, - TriggerRunQueryTriggerRunsByWorkspaceResponse -} from 
"../models"; +import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from "../models"; /** * Class representing a TriggerRun. @@ -98,11 +95,7 @@ const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; @@ -116,11 +109,7 @@ const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index 4c4453399fb4..9fc379060c1e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -26,10 +26,9 @@ export class Workspace { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + WorkspaceGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 058888c80532..550bd2db8014 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -56,11 +56,7 @@ const 
getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.gitHubAccessTokenRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.clientRequestId - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.clientRequestId], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-managed-endpoints/README.md b/sdk/synapse/synapse-managed-endpoints/README.md index d4c47ecde213..facf70ce6862 100644 --- a/sdk/synapse/synapse-managed-endpoints/README.md +++ b/sdk/synapse/synapse-managed-endpoints/README.md @@ -23,5 +23,4 @@ Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) - -![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) diff --git a/sdk/synapse/synapse-managed-endpoints/package.json b/sdk/synapse/synapse-managed-endpoints/package.json index 7e06a686ad0f..867165784976 100644 --- a/sdk/synapse/synapse-managed-endpoints/package.json +++ b/sdk/synapse/synapse-managed-endpoints/package.json @@ -8,7 +8,13 @@ "@azure/core-http": "^1.2.0", "tslib": "^2.0.0" }, - "keywords": ["node", "azure", "typescript", "browser", "isomorphic"], + "keywords": [ + "node", + "azure", + "typescript", + "browser", + "isomorphic" + ], "license": "MIT", "main": "./dist/synapse-managed-endpoints.js", "module": "./esm/index.js", @@ -26,7 +32,9 @@ "type": "git", "url": "https://github.com/Azure/azure-sdk-for-js.git" }, - "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "bugs": { + "url": 
"https://github.com/Azure/azure-sdk-for-js/issues" + }, "files": [ "dist/**/*.js", "dist/**/*.js.map", diff --git a/sdk/synapse/synapse-managed-endpoints/src/models/index.ts b/sdk/synapse/synapse-managed-endpoints/src/models/index.ts index c8a5ef476bf7..cae6cb3ab10e 100644 --- a/sdk/synapse/synapse-managed-endpoints/src/models/index.ts +++ b/sdk/synapse/synapse-managed-endpoints/src/models/index.ts @@ -170,8 +170,7 @@ export type ManagedPrivateEndpointsListNextResponse = ManagedPrivateEndpointList /** * Optional parameters. */ -export interface ManagedPrivateEndpointsClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface ManagedPrivateEndpointsClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts b/sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts index d354c10f3bf2..123c22c9a279 100644 --- a/sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts +++ b/sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts @@ -56,11 +56,7 @@ export class ManagedPrivateEndpoints { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._listNext( - managedVirtualNetworkName, - continuationToken, - options - ); + result = await this._listNext(managedVirtualNetworkName, continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -70,10 +66,7 @@ export class ManagedPrivateEndpoints { managedVirtualNetworkName: string, options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.listPagingPage( - managedVirtualNetworkName, - options - )) { + for await (const page of this.listPagingPage(managedVirtualNetworkName, options)) { yield* page; } } @@ -94,10 +87,9 @@ export class ManagedPrivateEndpoints { managedPrivateEndpointName, 
options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + ManagedPrivateEndpointsGetResponse + >; } /** @@ -119,10 +111,9 @@ export class ManagedPrivateEndpoints { managedPrivateEndpoint, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - createOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, createOperationSpec) as Promise< + ManagedPrivateEndpointsCreateResponse + >; } /** @@ -141,10 +132,9 @@ export class ManagedPrivateEndpoints { managedPrivateEndpointName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - deleteOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec) as Promise< + coreHttp.RestResponse + >; } /** @@ -160,10 +150,9 @@ export class ManagedPrivateEndpoints { managedVirtualNetworkName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + ManagedPrivateEndpointsListResponse + >; } /** @@ -182,10 +171,9 @@ export class ManagedPrivateEndpoints { nextLink, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listNextOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listNextOperationSpec) as Promise< + ManagedPrivateEndpointsListNextResponse + >; } } // Operation Specifications @@ -244,8 +232,7 @@ const deleteOperationSpec: 
coreHttp.OperationSpec = { serializer }; const listOperationSpec: coreHttp.OperationSpec = { - path: - "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", + path: "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", httpMethod: "GET", responses: { 200: { @@ -266,11 +253,7 @@ const listNextOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.managedVirtualNetworkName, - Parameters.nextLink - ], + urlParameters: [Parameters.endpoint, Parameters.managedVirtualNetworkName, Parameters.nextLink], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-monitoring/README.md b/sdk/synapse/synapse-monitoring/README.md index 28921720d464..eaf72b834c79 100644 --- a/sdk/synapse/synapse-monitoring/README.md +++ b/sdk/synapse/synapse-monitoring/README.md @@ -23,5 +23,4 @@ Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) - -![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index 46e520b0d243..9a40bac7ef77 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -3,8 +3,17 @@ "author": "Microsoft Corporation", "description": "A generated SDK for MonitoringClient.", "version": "1.0.0", - "dependencies": { "@azure/core-http": "^1.2.0", "tslib": "^2.0.0" }, - "keywords": ["node", "azure", "typescript", "browser", "isomorphic"], + "dependencies": { + "@azure/core-http": "^1.2.0", + "tslib": 
"^2.0.0" + }, + "keywords": [ + "node", + "azure", + "typescript", + "browser", + "isomorphic" + ], "license": "MIT", "main": "./dist/synapse-monitoring.js", "module": "./esm/index.js", @@ -22,7 +31,9 @@ "type": "git", "url": "https://github.com/Azure/azure-sdk-for-js.git" }, - "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, "files": [ "dist/**/*.js", "dist/**/*.js.map", diff --git a/sdk/synapse/synapse-monitoring/src/models/index.ts b/sdk/synapse/synapse-monitoring/src/models/index.ts index 4d28685ede7a..9d8c26fa69ed 100644 --- a/sdk/synapse/synapse-monitoring/src/models/index.ts +++ b/sdk/synapse/synapse-monitoring/src/models/index.ts @@ -30,8 +30,7 @@ export interface SqlQueryStringDataModel { /** * Optional parameters. */ -export interface MonitoringGetSparkJobListOptionalParams - extends coreHttp.OperationOptions { +export interface MonitoringGetSparkJobListOptionalParams extends coreHttp.OperationOptions { /** * Can provide a guid, which is helpful for debugging and to provide better customer support */ @@ -61,8 +60,7 @@ export type MonitoringGetSparkJobListResponse = SparkJobListViewResponse & { /** * Optional parameters. */ -export interface MonitoringGetSqlJobQueryStringOptionalParams - extends coreHttp.OperationOptions { +export interface MonitoringGetSqlJobQueryStringOptionalParams extends coreHttp.OperationOptions { /** * Can provide a guid, which is helpful for debugging and to provide better customer support */ @@ -95,8 +93,7 @@ export type MonitoringGetSqlJobQueryStringResponse = SqlQueryStringDataModel & { /** * Optional parameters. 
*/ -export interface MonitoringClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface MonitoringClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts index 53f2223463d5..9c82f427c63b 100644 --- a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts +++ b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts @@ -80,12 +80,7 @@ const getSqlJobQueryStringOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.SqlQueryStringDataModel } }, - queryParameters: [ - Parameters.apiVersion, - Parameters.filter, - Parameters.orderby, - Parameters.skip - ], + queryParameters: [Parameters.apiVersion, Parameters.filter, Parameters.orderby, Parameters.skip], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.xMsClientRequestId], serializer diff --git a/sdk/synapse/synapse-spark/README.md b/sdk/synapse/synapse-spark/README.md index 144dbf767bd7..3eb104f23062 100644 --- a/sdk/synapse/synapse-spark/README.md +++ b/sdk/synapse/synapse-spark/README.md @@ -23,5 +23,4 @@ Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) - -![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) \ No newline at end of file +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index 67d5c1cc9ac7..6ac3a2bf5fbf 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -3,8 +3,17 @@ "author": "Microsoft Corporation", "description": "A generated SDK for 
SparkClient.", "version": "1.0.0", - "dependencies": { "@azure/core-http": "^1.2.0", "tslib": "^2.0.0" }, - "keywords": ["node", "azure", "typescript", "browser", "isomorphic"], + "dependencies": { + "@azure/core-http": "^1.2.0", + "tslib": "^2.0.0" + }, + "keywords": [ + "node", + "azure", + "typescript", + "browser", + "isomorphic" + ], "license": "MIT", "main": "./dist/synapse-spark.js", "module": "./esm/index.js", @@ -22,7 +31,9 @@ "type": "git", "url": "https://github.com/Azure/azure-sdk-for-js.git" }, - "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, "files": [ "dist/**/*.js", "dist/**/*.js.map", diff --git a/sdk/synapse/synapse-spark/src/models/index.ts b/sdk/synapse/synapse-spark/src/models/index.ts index c388e24dc015..8f7ea475c8cf 100644 --- a/sdk/synapse/synapse-spark/src/models/index.ts +++ b/sdk/synapse/synapse-spark/src/models/index.ts @@ -465,8 +465,7 @@ export type SparkStatementLanguageType = string; /** * Optional parameters. */ -export interface SparkBatchGetSparkBatchJobsOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -505,8 +504,7 @@ export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { /** * Optional parameters. */ -export interface SparkBatchCreateSparkBatchJobOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -536,8 +534,7 @@ export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. 
*/ -export interface SparkBatchGetSparkBatchJobOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -567,8 +564,7 @@ export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionsOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -607,8 +603,7 @@ export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { /** * Optional parameters. */ -export interface SparkSessionCreateSparkSessionOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -638,8 +633,7 @@ export type SparkSessionCreateSparkSessionResponse = SparkSession & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -749,8 +743,7 @@ export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellatio /** * Optional parameters. */ -export interface SparkClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Valid api-version for the request. 
*/ diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts index d9d36b0a8b52..0a8b12b48eae 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts @@ -112,11 +112,7 @@ const getSparkBatchJobsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept], serializer }; @@ -130,11 +126,7 @@ const createSparkBatchJobOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.sparkBatchJobOptions, queryParameters: [Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts index 844d4d4331ac..60decb5b9384 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts @@ -221,11 +221,7 @@ const getSparkSessionsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept], serializer }; @@ -239,11 +235,7 @@ const createSparkSessionOperationSpec: coreHttp.OperationSpec = { }, requestBody: 
Parameters.sparkSessionOptions, queryParameters: [Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/sparkClientContext.ts b/sdk/synapse/synapse-spark/src/sparkClientContext.ts index 52587e47d3ef..0c01469a88a2 100644 --- a/sdk/synapse/synapse-spark/src/sparkClientContext.ts +++ b/sdk/synapse/synapse-spark/src/sparkClientContext.ts @@ -48,8 +48,7 @@ export class SparkClientContext extends coreHttp.ServiceClient { this.requestContentType = "application/json; charset=utf-8"; this.baseUri = - options.endpoint || - "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; + options.endpoint || "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; // Parameter assignments this.endpoint = endpoint; From 3d949ac14fadbaaca4de4faebe7e2a23d3ae6d6e Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 1 Dec 2020 07:48:01 +1300 Subject: [PATCH 04/28] WIP --- sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md index ae28e08cd661..dcbaaa3c40b0 100644 --- a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md +++ b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md @@ -6612,7 +6612,7 @@ export type ZohoSource = TabularSource & { // Warnings were encountered during analysis: // -// src/models/index.ts:15209:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts +// src/models/index.ts:15186:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the 
entry point index.d.ts // (No @packageDocumentation comment for this package) From a2325dfe5eff9654f3ac6d57ea20c2d191add25b Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 1 Dec 2020 08:19:31 +1300 Subject: [PATCH 05/28] Add last two to workspace --- dataplane.code-workspace | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dataplane.code-workspace b/dataplane.code-workspace index 9915295a241d..866851e4b749 100644 --- a/dataplane.code-workspace +++ b/dataplane.code-workspace @@ -144,6 +144,14 @@ "name": "synapse-artifacts", "path": "sdk/synapse/synapse-artifacts" }, + { + "name": "synapse-managed-endpoints", + "path": "sdk/synapse/synapse-managed-endpoints" + }, + { + "name": "synapse-monitoring", + "path": "sdk/synapse/synapse-monitoring" + }, { "name": "synapse-spark", "path": "sdk/synapse/synapse-spark" From 9fffefc9354373627ef8a2e8531d7bf4d9b29466 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 1 Dec 2020 09:41:25 +1300 Subject: [PATCH 06/28] Add basic READMEs and test builds --- sdk/synapse/synapse-accesscontrol/README.md | 43 ++++++++++++++----- .../synapse-accesscontrol/package.json | 1 + sdk/synapse/synapse-artifacts/README.md | 43 ++++++++++++++----- sdk/synapse/synapse-artifacts/package.json | 1 + .../synapse-managed-endpoints/README.md | 43 ++++++++++++++----- .../synapse-managed-endpoints/package.json | 1 + sdk/synapse/synapse-monitoring/README.md | 43 ++++++++++++++----- sdk/synapse/synapse-monitoring/package.json | 1 + sdk/synapse/synapse-spark/README.md | 39 +++++++++++++---- sdk/synapse/synapse-spark/package.json | 1 + 10 files changed, 164 insertions(+), 52 deletions(-) diff --git a/sdk/synapse/synapse-accesscontrol/README.md b/sdk/synapse/synapse-accesscontrol/README.md index d85737af8843..14eb381ce99f 100644 --- a/sdk/synapse/synapse-accesscontrol/README.md +++ b/sdk/synapse/synapse-accesscontrol/README.md @@ -1,26 +1,47 @@ -## Azure AccessControlClient SDK for JavaScript +## Azure Synapse Access Control client library 
for JavaScript -This package contains an isomorphic SDK for AccessControlClient. +This package contains an isomorphic SDK for SparkClient. -### Currently supported environments - -- Node.js version 8.x.x or higher -- Browser JavaScript - -### How to Install +## Getting started +### Install the package ```bash npm install @azure/synapse-accesscontrol ``` -### How to use +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +## Key concepts -#### Sample code +## Examples -Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. +In the future, we will share samples here. ## Related projects - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) ![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +In the future, you'll find additional code samples here. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code. 
+ +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png) diff --git a/sdk/synapse/synapse-accesscontrol/package.json b/sdk/synapse/synapse-accesscontrol/package.json index e807162d24b7..ec101bdc20b8 100644 --- a/sdk/synapse/synapse-accesscontrol/package.json +++ b/sdk/synapse/synapse-accesscontrol/package.json @@ -53,6 +53,7 @@ "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-accesscontrol.js.map'\" -o ./dist/synapse-accesscontrol.min.js ./dist/synapse-accesscontrol.js", "prepack": "npm install && npm run build", + "build:test": "echo skip", "extract-api": "api-extractor run --local" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-artifacts/README.md b/sdk/synapse/synapse-artifacts/README.md index e41467ded6dc..f0cc2fb5b6cf 100644 --- a/sdk/synapse/synapse-artifacts/README.md +++ b/sdk/synapse/synapse-artifacts/README.md @@ -1,26 +1,47 @@ -## Azure ArtifactsClient SDK for JavaScript +## Azure Synapse Artifacts client library for JavaScript -This package contains an isomorphic SDK for ArtifactsClient. +This package contains an isomorphic SDK for SparkClient. -### Currently supported environments - -- Node.js version 8.x.x or higher -- Browser JavaScript - -### How to Install +## Getting started +### Install the package ```bash npm install @azure/synapse-artifacts ``` -### How to use +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +## Key concepts -#### Sample code +## Examples -Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. +In the future, we will share samples here. 
## Related projects - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) ![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +In the future, you'll find additional code samples here. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png) diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 56d45b88fb53..5dc5714c9154 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -54,6 +54,7 @@ "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-artifacts.js.map'\" -o ./dist/synapse-artifacts.min.js ./dist/synapse-artifacts.js", "prepack": "npm install && npm run build", + "build:test": "echo skip", "extract-api": "api-extractor run --local" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-managed-endpoints/README.md b/sdk/synapse/synapse-managed-endpoints/README.md index facf70ce6862..5c4431ee7639 100644 --- a/sdk/synapse/synapse-managed-endpoints/README.md +++ b/sdk/synapse/synapse-managed-endpoints/README.md 
@@ -1,26 +1,47 @@ -## Azure ManagedPrivateEndpointsClient SDK for JavaScript +## Azure Synapse Managed Endpoints client library for JavaScript -This package contains an isomorphic SDK for ManagedPrivateEndpointsClient. +This package contains an isomorphic SDK for SparkClient. -### Currently supported environments - -- Node.js version 8.x.x or higher -- Browser JavaScript - -### How to Install +## Getting started +### Install the package ```bash npm install @azure/synapse-managed-endpoints ``` -### How to use +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +## Key concepts -#### Sample code +## Examples -Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. +In the future, we will share samples here. ## Related projects - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) ![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +In the future, you'll find additional code samples here. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code. 
+ +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png) diff --git a/sdk/synapse/synapse-managed-endpoints/package.json b/sdk/synapse/synapse-managed-endpoints/package.json index 867165784976..6c80c714029f 100644 --- a/sdk/synapse/synapse-managed-endpoints/package.json +++ b/sdk/synapse/synapse-managed-endpoints/package.json @@ -53,6 +53,7 @@ "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-endpoints.js.map'\" -o ./dist/synapse-managed-endpoints.min.js ./dist/synapse-managed-endpoints.js", "prepack": "npm install && npm run build", + "build:test": "echo skip", "extract-api": "api-extractor run --local" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-monitoring/README.md b/sdk/synapse/synapse-monitoring/README.md index eaf72b834c79..f052dab1968c 100644 --- a/sdk/synapse/synapse-monitoring/README.md +++ b/sdk/synapse/synapse-monitoring/README.md @@ -1,26 +1,47 @@ -## Azure MonitoringClient SDK for JavaScript +## Azure Synapse Monitoring client library for JavaScript -This package contains an isomorphic SDK for MonitoringClient. +This package contains an isomorphic SDK for SparkClient. -### Currently supported environments - -- Node.js version 8.x.x or higher -- Browser JavaScript - -### How to Install +## Getting started +### Install the package ```bash npm install @azure/synapse-monitoring ``` -### How to use +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +## Key concepts -#### Sample code +## Examples -Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. +In the future, we will share samples here. 
## Related projects - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) ![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +In the future, you'll find additional code samples here. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png) diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index 9a40bac7ef77..0fa1a70f690a 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -52,6 +52,7 @@ "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-monitoring.js.map'\" -o ./dist/synapse-monitoring.min.js ./dist/synapse-monitoring.js", "prepack": "npm install && npm run build", + "build:test": "echo skip", "extract-api": "api-extractor run --local" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-spark/README.md b/sdk/synapse/synapse-spark/README.md index 3eb104f23062..f1596c4f5e5f 100644 --- a/sdk/synapse/synapse-spark/README.md +++ b/sdk/synapse/synapse-spark/README.md @@ -1,26 +1,49 @@ -## Azure SparkClient 
SDK for JavaScript +## Azure Synapse Spark client library for JavaScript This package contains an isomorphic SDK for SparkClient. +## Getting started +### Install the package + +```bash +npm install @azure/synapse-spark +``` + ### Currently supported environments - Node.js version 8.x.x or higher - Browser JavaScript -### How to Install - -```bash -npm install @azure/synapse-spark -``` +## Key concepts ### How to use -#### Sample code +## Examples -Refer the sample code in the [azure-sdk-for-js-samples](https://github.com/Azure/azure-sdk-for-js-samples) repository. +In the future, we will share samples here. ## Related projects - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) ![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +In the future, you'll find additional code samples here. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code. 
+ +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png) diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index 6ac3a2bf5fbf..d5bdb9f35720 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -52,6 +52,7 @@ "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-spark.js.map'\" -o ./dist/synapse-spark.min.js ./dist/synapse-spark.js", "prepack": "npm install && npm run build", + "build:test": "echo skip", "extract-api": "api-extractor run --local" }, "sideEffects": false, From b8957452ed648472a7adb2ba3693f33242dcb7ae Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 1 Dec 2020 09:41:31 +1300 Subject: [PATCH 07/28] Add basic READMEs and test builds --- sdk/synapse/synapse-accesscontrol/README.md | 1 + sdk/synapse/synapse-artifacts/README.md | 1 + sdk/synapse/synapse-managed-endpoints/README.md | 1 + sdk/synapse/synapse-monitoring/README.md | 1 + sdk/synapse/synapse-spark/README.md | 1 + 5 files changed, 5 insertions(+) diff --git a/sdk/synapse/synapse-accesscontrol/README.md b/sdk/synapse/synapse-accesscontrol/README.md index 14eb381ce99f..08b03cee9c5c 100644 --- a/sdk/synapse/synapse-accesscontrol/README.md +++ b/sdk/synapse/synapse-accesscontrol/README.md @@ -3,6 +3,7 @@ This package contains an isomorphic SDK for SparkClient. ## Getting started + ### Install the package ```bash diff --git a/sdk/synapse/synapse-artifacts/README.md b/sdk/synapse/synapse-artifacts/README.md index f0cc2fb5b6cf..67ea2ea8a059 100644 --- a/sdk/synapse/synapse-artifacts/README.md +++ b/sdk/synapse/synapse-artifacts/README.md @@ -3,6 +3,7 @@ This package contains an isomorphic SDK for SparkClient. 
## Getting started + ### Install the package ```bash diff --git a/sdk/synapse/synapse-managed-endpoints/README.md b/sdk/synapse/synapse-managed-endpoints/README.md index 5c4431ee7639..e98eef287ccc 100644 --- a/sdk/synapse/synapse-managed-endpoints/README.md +++ b/sdk/synapse/synapse-managed-endpoints/README.md @@ -3,6 +3,7 @@ This package contains an isomorphic SDK for SparkClient. ## Getting started + ### Install the package ```bash diff --git a/sdk/synapse/synapse-monitoring/README.md b/sdk/synapse/synapse-monitoring/README.md index f052dab1968c..1546ac7d0c7a 100644 --- a/sdk/synapse/synapse-monitoring/README.md +++ b/sdk/synapse/synapse-monitoring/README.md @@ -3,6 +3,7 @@ This package contains an isomorphic SDK for SparkClient. ## Getting started + ### Install the package ```bash diff --git a/sdk/synapse/synapse-spark/README.md b/sdk/synapse/synapse-spark/README.md index f1596c4f5e5f..5fa1a44ea839 100644 --- a/sdk/synapse/synapse-spark/README.md +++ b/sdk/synapse/synapse-spark/README.md @@ -3,6 +3,7 @@ This package contains an isomorphic SDK for SparkClient. 
## Getting started + ### Install the package ```bash From 19ea7cc9c604d6ceac7663eb1803038ba3f80e2a Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 1 Dec 2020 09:44:32 +1300 Subject: [PATCH 08/28] Fix package versions --- sdk/synapse/synapse-accesscontrol/package.json | 2 +- sdk/synapse/synapse-artifacts/package.json | 2 +- sdk/synapse/synapse-managed-endpoints/package.json | 2 +- sdk/synapse/synapse-monitoring/package.json | 2 +- sdk/synapse/synapse-spark/package.json | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/synapse/synapse-accesscontrol/package.json b/sdk/synapse/synapse-accesscontrol/package.json index ec101bdc20b8..7b8c46b6dde7 100644 --- a/sdk/synapse/synapse-accesscontrol/package.json +++ b/sdk/synapse/synapse-accesscontrol/package.json @@ -2,7 +2,7 @@ "name": "@azure/synapse-accesscontrol", "author": "Microsoft Corporation", "description": "A generated SDK for AccessControlClient.", - "version": "1.0.0", + "version": "1.0.0-beta.1", "dependencies": { "@azure/core-paging": "^1.1.1", "@azure/core-http": "^1.2.0", diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 5dc5714c9154..59299e7ac989 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -2,7 +2,7 @@ "name": "@azure/synapse-artifacts", "author": "Microsoft Corporation", "description": "A generated SDK for ArtifactsClient.", - "version": "1.0.0", + "version": "1.0.0-beta.1", "dependencies": { "@azure/core-lro": "^1.0.2", "@azure/core-paging": "^1.1.1", diff --git a/sdk/synapse/synapse-managed-endpoints/package.json b/sdk/synapse/synapse-managed-endpoints/package.json index 6c80c714029f..eb14734ab3c5 100644 --- a/sdk/synapse/synapse-managed-endpoints/package.json +++ b/sdk/synapse/synapse-managed-endpoints/package.json @@ -2,7 +2,7 @@ "name": "@azure/synapse-managed-endpoints", "author": "Microsoft Corporation", "description": "A generated SDK for 
ManagedPrivateEndpointsClient.", - "version": "1.0.0", + "version": "1.0.0-beta.1", "dependencies": { "@azure/core-paging": "^1.1.1", "@azure/core-http": "^1.2.0", diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index 0fa1a70f690a..9af94ad4bb29 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -2,7 +2,7 @@ "name": "@azure/synapse-monitoring", "author": "Microsoft Corporation", "description": "A generated SDK for MonitoringClient.", - "version": "1.0.0", + "version": "1.0.0-beta.1", "dependencies": { "@azure/core-http": "^1.2.0", "tslib": "^2.0.0" diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index d5bdb9f35720..430524b9369d 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -2,7 +2,7 @@ "name": "@azure/synapse-spark", "author": "Microsoft Corporation", "description": "A generated SDK for SparkClient.", - "version": "1.0.0", + "version": "1.0.0-beta.1", "dependencies": { "@azure/core-http": "^1.2.0", "tslib": "^2.0.0" From b5a5b51cd823266c5650d0302b562c1d6014fe62 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 1 Dec 2020 10:07:50 +1300 Subject: [PATCH 09/28] Skip tests --- sdk/synapse/synapse-accesscontrol/package.json | 3 +++ sdk/synapse/synapse-artifacts/package.json | 3 +++ sdk/synapse/synapse-managed-endpoints/package.json | 3 +++ sdk/synapse/synapse-monitoring/package.json | 3 +++ sdk/synapse/synapse-spark/package.json | 3 +++ 5 files changed, 15 insertions(+) diff --git a/sdk/synapse/synapse-accesscontrol/package.json b/sdk/synapse/synapse-accesscontrol/package.json index 7b8c46b6dde7..503bea9d5401 100644 --- a/sdk/synapse/synapse-accesscontrol/package.json +++ b/sdk/synapse/synapse-accesscontrol/package.json @@ -54,6 +54,9 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-accesscontrol.js.map'\" -o 
./dist/synapse-accesscontrol.min.js ./dist/synapse-accesscontrol.js", "prepack": "npm install && npm run build", "build:test": "echo skip", + "unit-test:browser": "echo skipped", + "unit-test:node": "echo skipped", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", "extract-api": "api-extractor run --local" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 59299e7ac989..d932db74be49 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -55,6 +55,9 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-artifacts.js.map'\" -o ./dist/synapse-artifacts.min.js ./dist/synapse-artifacts.js", "prepack": "npm install && npm run build", "build:test": "echo skip", + "unit-test:browser": "echo skipped", + "unit-test:node": "echo skipped", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", "extract-api": "api-extractor run --local" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-managed-endpoints/package.json b/sdk/synapse/synapse-managed-endpoints/package.json index eb14734ab3c5..7f1f9c041a63 100644 --- a/sdk/synapse/synapse-managed-endpoints/package.json +++ b/sdk/synapse/synapse-managed-endpoints/package.json @@ -54,6 +54,9 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-endpoints.js.map'\" -o ./dist/synapse-managed-endpoints.min.js ./dist/synapse-managed-endpoints.js", "prepack": "npm install && npm run build", "build:test": "echo skip", + "unit-test:browser": "echo skipped", + "unit-test:node": "echo skipped", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", "extract-api": "api-extractor run --local" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index 9af94ad4bb29..a16d918b8707 100644 --- 
a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -53,6 +53,9 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-monitoring.js.map'\" -o ./dist/synapse-monitoring.min.js ./dist/synapse-monitoring.js", "prepack": "npm install && npm run build", "build:test": "echo skip", + "unit-test:browser": "echo skipped", + "unit-test:node": "echo skipped", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", "extract-api": "api-extractor run --local" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index 430524b9369d..0e33a1c65dfc 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -53,6 +53,9 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-spark.js.map'\" -o ./dist/synapse-spark.min.js ./dist/synapse-spark.js", "prepack": "npm install && npm run build", "build:test": "echo skip", + "unit-test:browser": "echo skipped", + "unit-test:node": "echo skipped", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", "extract-api": "api-extractor run --local" }, "sideEffects": false, From 2aa05a70363461006d525e4afb9df3c163f271c2 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 2 Dec 2020 07:10:04 +1300 Subject: [PATCH 10/28] Address feedback --- dataplane.code-workspace | 4 ++-- rush.json | 4 ++-- sdk/synapse/synapse-accesscontrol/api-extractor.json | 2 +- sdk/synapse/synapse-artifacts/api-extractor.json | 2 +- .../README.md | 2 +- .../api-extractor.json | 2 +- .../package.json | 6 +++--- .../review/synapse-managed-endpoints.api.md | 2 +- .../rollup.config.js | 4 ++-- .../src/index.ts | 0 .../src/managedPrivateEndpointsClient.ts | 0 .../src/managedPrivateEndpointsClientContext.ts | 2 +- .../src/models/index.ts | 0 .../src/models/mappers.ts | 0 .../src/models/parameters.ts | 0 .../src/operations/index.ts | 0 
.../src/operations/managedPrivateEndpoints.ts | 0 .../tsconfig.json | 0 sdk/synapse/synapse-monitoring/api-extractor.json | 2 +- sdk/synapse/synapse-spark/api-extractor.json | 2 +- 20 files changed, 17 insertions(+), 17 deletions(-) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/README.md (96%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/api-extractor.json (87%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/package.json (86%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/review/synapse-managed-endpoints.api.md (97%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/rollup.config.js (88%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/src/index.ts (100%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/src/managedPrivateEndpointsClient.ts (100%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/src/managedPrivateEndpointsClientContext.ts (96%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/src/models/index.ts (100%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/src/models/mappers.ts (100%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/src/models/parameters.ts (100%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/src/operations/index.ts (100%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/src/operations/managedPrivateEndpoints.ts (100%) rename sdk/synapse/{synapse-managed-endpoints => synapse-managed-private-endpoints}/tsconfig.json (100%) diff --git a/dataplane.code-workspace b/dataplane.code-workspace index 866851e4b749..b30dc777ef80 100644 --- a/dataplane.code-workspace +++ 
b/dataplane.code-workspace @@ -145,8 +145,8 @@ "path": "sdk/synapse/synapse-artifacts" }, { - "name": "synapse-managed-endpoints", - "path": "sdk/synapse/synapse-managed-endpoints" + "name": "synapse-managed-private-endpoints", + "path": "sdk/synapse/synapse-managed-private-endpoints" }, { "name": "synapse-monitoring", diff --git a/rush.json b/rush.json index 411e19f1b9fe..0925d5f9de73 100644 --- a/rush.json +++ b/rush.json @@ -563,8 +563,8 @@ "versionPolicyName": "client" }, { - "packageName": "@azure/synapse-managed-endpoints", - "projectFolder": "sdk/synapse/synapse-managed-endpoints", + "packageName": "@azure/synapse-managed-private-endpoints", + "projectFolder": "sdk/synapse/synapse-managed-private-endpoints", "versionPolicyName": "client" }, { diff --git a/sdk/synapse/synapse-accesscontrol/api-extractor.json b/sdk/synapse/synapse-accesscontrol/api-extractor.json index 5fb5b21b56f2..f674a4100a6a 100644 --- a/sdk/synapse/synapse-accesscontrol/api-extractor.json +++ b/sdk/synapse/synapse-accesscontrol/api-extractor.json @@ -6,7 +6,7 @@ "dtsRollup": { "enabled": true, "untrimmedFilePath": "", - "publicTrimmedFilePath": "./esm/index.d.ts" + "publicTrimmedFilePath": "./types/synapse-accesscontrol.d.ts" }, "messages": { "tsdocMessageReporting": { "default": { "logLevel": "none" } }, diff --git a/sdk/synapse/synapse-artifacts/api-extractor.json b/sdk/synapse/synapse-artifacts/api-extractor.json index 5fb5b21b56f2..80612d4592f6 100644 --- a/sdk/synapse/synapse-artifacts/api-extractor.json +++ b/sdk/synapse/synapse-artifacts/api-extractor.json @@ -6,7 +6,7 @@ "dtsRollup": { "enabled": true, "untrimmedFilePath": "", - "publicTrimmedFilePath": "./esm/index.d.ts" + "publicTrimmedFilePath": "./types/synapse-artifacts.d.ts" }, "messages": { "tsdocMessageReporting": { "default": { "logLevel": "none" } }, diff --git a/sdk/synapse/synapse-managed-endpoints/README.md b/sdk/synapse/synapse-managed-private-endpoints/README.md similarity index 96% rename from 
sdk/synapse/synapse-managed-endpoints/README.md rename to sdk/synapse/synapse-managed-private-endpoints/README.md index e98eef287ccc..a9dd26977a42 100644 --- a/sdk/synapse/synapse-managed-endpoints/README.md +++ b/sdk/synapse/synapse-managed-private-endpoints/README.md @@ -7,7 +7,7 @@ This package contains an isomorphic SDK for SparkClient. ### Install the package ```bash -npm install @azure/synapse-managed-endpoints +npm install @azure/synapse-managed-private-endpoints ``` ### Currently supported environments diff --git a/sdk/synapse/synapse-managed-endpoints/api-extractor.json b/sdk/synapse/synapse-managed-private-endpoints/api-extractor.json similarity index 87% rename from sdk/synapse/synapse-managed-endpoints/api-extractor.json rename to sdk/synapse/synapse-managed-private-endpoints/api-extractor.json index 5fb5b21b56f2..3c5b8d02834e 100644 --- a/sdk/synapse/synapse-managed-endpoints/api-extractor.json +++ b/sdk/synapse/synapse-managed-private-endpoints/api-extractor.json @@ -6,7 +6,7 @@ "dtsRollup": { "enabled": true, "untrimmedFilePath": "", - "publicTrimmedFilePath": "./esm/index.d.ts" + "publicTrimmedFilePath": "./types/synapse-managed-private-endpoints.d.ts" }, "messages": { "tsdocMessageReporting": { "default": { "logLevel": "none" } }, diff --git a/sdk/synapse/synapse-managed-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json similarity index 86% rename from sdk/synapse/synapse-managed-endpoints/package.json rename to sdk/synapse/synapse-managed-private-endpoints/package.json index 7f1f9c041a63..297bf998928d 100644 --- a/sdk/synapse/synapse-managed-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -1,5 +1,5 @@ { - "name": "@azure/synapse-managed-endpoints", + "name": "@azure/synapse-managed-private-endpoints", "author": "Microsoft Corporation", "description": "A generated SDK for ManagedPrivateEndpointsClient.", "version": "1.0.0-beta.1", @@ -16,7 +16,7 @@ "isomorphic" ], 
"license": "MIT", - "main": "./dist/synapse-managed-endpoints.js", + "main": "./dist/synapse-managed-private-endpoints.js", "module": "./esm/index.js", "types": "./esm/index.d.ts", "devDependencies": { @@ -51,7 +51,7 @@ ], "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", - "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-endpoints.js.map'\" -o ./dist/synapse-managed-endpoints.min.js ./dist/synapse-managed-endpoints.js", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-private-endpoints.js.map'\" -o ./dist/synapse-managed-private-endpoints.min.js ./dist/synapse-managed-private-endpoints.js", "prepack": "npm install && npm run build", "build:test": "echo skip", "unit-test:browser": "echo skipped", diff --git a/sdk/synapse/synapse-managed-endpoints/review/synapse-managed-endpoints.api.md b/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-endpoints.api.md similarity index 97% rename from sdk/synapse/synapse-managed-endpoints/review/synapse-managed-endpoints.api.md rename to sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-endpoints.api.md index 279522fa6880..01eb0fe7c8c1 100644 --- a/sdk/synapse/synapse-managed-endpoints/review/synapse-managed-endpoints.api.md +++ b/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-endpoints.api.md @@ -1,4 +1,4 @@ -## API Report File for "@azure/synapse-managed-endpoints" +## API Report File for "@azure/synapse-managed-private-endpoints" > Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). 
diff --git a/sdk/synapse/synapse-managed-endpoints/rollup.config.js b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js similarity index 88% rename from sdk/synapse/synapse-managed-endpoints/rollup.config.js rename to sdk/synapse/synapse-managed-private-endpoints/rollup.config.js index bd02ade8368c..bcc4c1c74437 100644 --- a/sdk/synapse/synapse-managed-endpoints/rollup.config.js +++ b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js @@ -9,9 +9,9 @@ const config = { input: "./esm/managedPrivateEndpointsClient.js", external: ["@azure/core-http", "@azure/core-arm"], output: { - file: "./dist/synapse-managed-endpoints.js", + file: "./dist/synapse-managed-private-endpoints.js", format: "umd", - name: "Azure.SynapseManagedEndpoints", + name: "Azure.SynapseManagedPrivateEndpoints", sourcemap: true, globals: { "@azure/core-http": "coreHttp", diff --git a/sdk/synapse/synapse-managed-endpoints/src/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/index.ts similarity index 100% rename from sdk/synapse/synapse-managed-endpoints/src/index.ts rename to sdk/synapse/synapse-managed-private-endpoints/src/index.ts diff --git a/sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClient.ts b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClient.ts similarity index 100% rename from sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClient.ts rename to sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClient.ts diff --git a/sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClientContext.ts b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts similarity index 96% rename from sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClientContext.ts rename to sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts index 8b619e1e7781..c9345f166a0b 100644 --- 
a/sdk/synapse/synapse-managed-endpoints/src/managedPrivateEndpointsClientContext.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts @@ -1,7 +1,7 @@ import * as coreHttp from "@azure/core-http"; import { ManagedPrivateEndpointsClientOptionalParams } from "./models"; -const packageName = "@azure/synapse-managed-endpoints"; +const packageName = "@azure/synapse-managed-private-endpoints"; const packageVersion = "1.0.0"; export class ManagedPrivateEndpointsClientContext extends coreHttp.ServiceClient { diff --git a/sdk/synapse/synapse-managed-endpoints/src/models/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts similarity index 100% rename from sdk/synapse/synapse-managed-endpoints/src/models/index.ts rename to sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts diff --git a/sdk/synapse/synapse-managed-endpoints/src/models/mappers.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/mappers.ts similarity index 100% rename from sdk/synapse/synapse-managed-endpoints/src/models/mappers.ts rename to sdk/synapse/synapse-managed-private-endpoints/src/models/mappers.ts diff --git a/sdk/synapse/synapse-managed-endpoints/src/models/parameters.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/parameters.ts similarity index 100% rename from sdk/synapse/synapse-managed-endpoints/src/models/parameters.ts rename to sdk/synapse/synapse-managed-private-endpoints/src/models/parameters.ts diff --git a/sdk/synapse/synapse-managed-endpoints/src/operations/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/operations/index.ts similarity index 100% rename from sdk/synapse/synapse-managed-endpoints/src/operations/index.ts rename to sdk/synapse/synapse-managed-private-endpoints/src/operations/index.ts diff --git a/sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts 
b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts similarity index 100% rename from sdk/synapse/synapse-managed-endpoints/src/operations/managedPrivateEndpoints.ts rename to sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts diff --git a/sdk/synapse/synapse-managed-endpoints/tsconfig.json b/sdk/synapse/synapse-managed-private-endpoints/tsconfig.json similarity index 100% rename from sdk/synapse/synapse-managed-endpoints/tsconfig.json rename to sdk/synapse/synapse-managed-private-endpoints/tsconfig.json diff --git a/sdk/synapse/synapse-monitoring/api-extractor.json b/sdk/synapse/synapse-monitoring/api-extractor.json index 5fb5b21b56f2..bee41e4ee6ab 100644 --- a/sdk/synapse/synapse-monitoring/api-extractor.json +++ b/sdk/synapse/synapse-monitoring/api-extractor.json @@ -6,7 +6,7 @@ "dtsRollup": { "enabled": true, "untrimmedFilePath": "", - "publicTrimmedFilePath": "./esm/index.d.ts" + "publicTrimmedFilePath": "./types/synapse-monitoring.d.ts" }, "messages": { "tsdocMessageReporting": { "default": { "logLevel": "none" } }, diff --git a/sdk/synapse/synapse-spark/api-extractor.json b/sdk/synapse/synapse-spark/api-extractor.json index 5fb5b21b56f2..bee41e4ee6ab 100644 --- a/sdk/synapse/synapse-spark/api-extractor.json +++ b/sdk/synapse/synapse-spark/api-extractor.json @@ -6,7 +6,7 @@ "dtsRollup": { "enabled": true, "untrimmedFilePath": "", - "publicTrimmedFilePath": "./esm/index.d.ts" + "publicTrimmedFilePath": "./types/synapse-monitoring.d.ts" }, "messages": { "tsdocMessageReporting": { "default": { "logLevel": "none" } }, From fe8655709a51e3955d8ef7d383f943c860e570d6 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 2 Dec 2020 07:23:28 +1300 Subject: [PATCH 11/28] Address feedback --- common/config/rush/pnpm-lock.yaml | 15 ++++++++------- ...d => synapse-managed-private-endpoints.api.md} | 0 2 files changed, 8 insertions(+), 7 deletions(-) rename 
sdk/synapse/synapse-managed-private-endpoints/review/{synapse-managed-endpoints.api.md => synapse-managed-private-endpoints.api.md} (100%) diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index 574d8020bfc7..4f2447581470 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -49,7 +49,7 @@ dependencies: '@rush-temp/storage-queue': 'file:projects/storage-queue.tgz' '@rush-temp/synapse-accesscontrol': 'file:projects/synapse-accesscontrol.tgz' '@rush-temp/synapse-artifacts': 'file:projects/synapse-artifacts.tgz' - '@rush-temp/synapse-managed-endpoints': 'file:projects/synapse-managed-endpoints.tgz' + '@rush-temp/synapse-managed-private-endpoints': 'file:projects/synapse-managed-private-endpoints.tgz' '@rush-temp/synapse-monitoring': 'file:projects/synapse-monitoring.tgz' '@rush-temp/synapse-spark': 'file:projects/synapse-spark.tgz' '@rush-temp/template': 'file:projects/template.tgz' @@ -8764,7 +8764,7 @@ packages: dev: false name: '@rush-temp/ai-text-analytics' resolution: - integrity: sha512-VLKH85eh2Gp7BAGZFRL5CbF8Q5uHXP/hJwkDUGU34MwAi157MMnb9o0+zrpMKUdyYyQ+XhkVpluCbR75GxMGag== + integrity: sha512-XOI2yx/SP/BU3sS09tB73xNR6qjbtGD5AYwruw6X1DHIl+z9JkFAHCF5UcQFd76yYUEfZcL9eeLfX+iJJvKl2w== tarball: 'file:projects/ai-text-analytics.tgz' version: 0.0.0 'file:projects/app-configuration.tgz': @@ -10919,7 +10919,7 @@ packages: integrity: sha512-z1Mzvmykm4QueMmQH8u3rs2Zo3NKDB6DOUDZpbxQd+uBfJkpCLtF3dVeweKusU1kUZh0lwB5CyEIr48QgGm/3w== tarball: 'file:projects/synapse-artifacts.tgz' version: 0.0.0 - 'file:projects/synapse-managed-endpoints.tgz': + 'file:projects/synapse-managed-private-endpoints.tgz': dependencies: '@microsoft/api-extractor': 7.7.11 rollup: 1.32.1 @@ -10929,10 +10929,10 @@ packages: typescript: 3.9.7 uglify-js: 3.12.0 dev: false - name: '@rush-temp/synapse-managed-endpoints' + name: '@rush-temp/synapse-managed-private-endpoints' resolution: - integrity: 
sha512-bF8Y9PRUZY4nG9Xeu/yxWXSabNdd7XJPbFOwzzMyRX+Ti37Kidp3FBI90sxBRdCSVFNXPHUQaDeKXQ1LDvAC8w== - tarball: 'file:projects/synapse-managed-endpoints.tgz' + integrity: sha512-6aMZRNXwJ7g27eg3aeMp937no7JUghoBSJZ2WnGYJ28mOGii4nNGuTXVad6wVcJc9o5uCsp6hhbjSmzxFszwZQ== + tarball: 'file:projects/synapse-managed-private-endpoints.tgz' version: 0.0.0 'file:projects/synapse-monitoring.tgz': dependencies: @@ -11106,6 +11106,7 @@ packages: integrity: sha512-uZaBXfZM1ISycMx+p8kUd/Yqui/KgjgUm/CyAzxK2wkOmqKF66JuLD5lbkH4tXBi+F5rL59UaMhjjxskb6qbzA== tarball: 'file:projects/testhub.tgz' version: 0.0.0 +registry: '' specifiers: '@rush-temp/abort-controller': 'file:./projects/abort-controller.tgz' '@rush-temp/ai-anomaly-detector': 'file:./projects/ai-anomaly-detector.tgz' @@ -11157,7 +11158,7 @@ specifiers: '@rush-temp/storage-queue': 'file:./projects/storage-queue.tgz' '@rush-temp/synapse-accesscontrol': 'file:./projects/synapse-accesscontrol.tgz' '@rush-temp/synapse-artifacts': 'file:./projects/synapse-artifacts.tgz' - '@rush-temp/synapse-managed-endpoints': 'file:./projects/synapse-managed-endpoints.tgz' + '@rush-temp/synapse-managed-private-endpoints': 'file:./projects/synapse-managed-private-endpoints.tgz' '@rush-temp/synapse-monitoring': 'file:./projects/synapse-monitoring.tgz' '@rush-temp/synapse-spark': 'file:./projects/synapse-spark.tgz' '@rush-temp/template': 'file:./projects/template.tgz' diff --git a/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-endpoints.api.md b/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-private-endpoints.api.md similarity index 100% rename from sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-endpoints.api.md rename to sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-private-endpoints.api.md From 5b52913e46313cee18715fcb2bce38d2557b6c50 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 2 Dec 2020 08:41:04 +1300 Subject: [PATCH 12/28] Rerun swagger codegen --- 
.../synapse-accesscontrol/package.json | 3 +- .../src/accessControlClient.ts | 6 +- .../synapse-accesscontrol/src/models/index.ts | 3 +- sdk/synapse/synapse-artifacts/package.json | 3 +- .../review/synapse-artifacts.api.md | 16 +- .../src/lro/azureAsyncOperationStrategy.ts | 42 +++-- .../src/lro/bodyPollingStrategy.ts | 5 +- .../src/lro/locationStrategy.ts | 7 +- .../synapse-artifacts/src/lro/lroPolicy.ts | 8 +- .../synapse-artifacts/src/lro/lroPoller.ts | 27 +++- .../synapse-artifacts/src/lro/models.ts | 8 +- .../synapse-artifacts/src/lro/operation.ts | 4 +- .../synapse-artifacts/src/lro/requestUtils.ts | 16 +- .../synapse-artifacts/src/models/index.ts | 135 +++++++++------- .../synapse-artifacts/src/models/mappers.ts | 148 ++++++++++-------- .../src/operations/bigDataPools.ts | 14 +- .../src/operations/dataFlow.ts | 34 +++- .../src/operations/dataFlowDebugSession.ts | 39 +++-- .../src/operations/dataset.ts | 41 +++-- .../src/operations/integrationRuntimes.ts | 23 ++- .../src/operations/linkedService.ts | 38 ++++- .../src/operations/notebook.ts | 43 +++-- .../src/operations/pipeline.ts | 34 +++- .../src/operations/pipelineRun.ts | 6 +- .../src/operations/sparkJobDefinition.ts | 38 ++++- .../src/operations/sqlPools.ts | 19 ++- .../src/operations/sqlScript.ts | 20 ++- .../src/operations/trigger.ts | 60 +++++-- .../src/operations/triggerRun.ts | 17 +- .../src/operations/workspace.ts | 7 +- .../operations/workspaceGitRepoManagement.ts | 6 +- .../package.json | 3 +- .../src/models/index.ts | 3 +- .../src/operations/managedPrivateEndpoints.ts | 55 ++++--- sdk/synapse/synapse-monitoring/package.json | 3 +- .../synapse-monitoring/src/models/index.ts | 9 +- .../src/operations/monitoring.ts | 7 +- sdk/synapse/synapse-spark/api-extractor.json | 2 +- sdk/synapse/synapse-spark/package.json | 3 +- sdk/synapse/synapse-spark/src/models/index.ts | 21 ++- .../src/operations/sparkBatch.ts | 12 +- .../src/operations/sparkSession.ts | 12 +- .../synapse-spark/src/sparkClientContext.ts 
| 3 +- 43 files changed, 704 insertions(+), 299 deletions(-) diff --git a/sdk/synapse/synapse-accesscontrol/package.json b/sdk/synapse/synapse-accesscontrol/package.json index 503bea9d5401..f97c4061e92b 100644 --- a/sdk/synapse/synapse-accesscontrol/package.json +++ b/sdk/synapse/synapse-accesscontrol/package.json @@ -18,7 +18,7 @@ "license": "MIT", "main": "./dist/synapse-accesscontrol.js", "module": "./esm/index.js", - "types": "./esm/index.d.ts", + "types": "./types/synapse-accesscontrol.d.ts", "devDependencies": { "typescript": "~3.9.3", "rollup": "^1.16.3", @@ -53,6 +53,7 @@ "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-accesscontrol.js.map'\" -o ./dist/synapse-accesscontrol.min.js ./dist/synapse-accesscontrol.js", "prepack": "npm install && npm run build", + "pack": "npm pack 2>&1", "build:test": "echo skip", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", diff --git a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts index 61aeccd43e68..0a7ff0c8a2e3 100644 --- a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts +++ b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts @@ -291,7 +291,11 @@ const getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [Parameters.apiVersion, Parameters.roleId1, Parameters.principalId], + queryParameters: [ + Parameters.apiVersion, + Parameters.roleId1, + Parameters.principalId + ], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.continuationToken], serializer diff --git a/sdk/synapse/synapse-accesscontrol/src/models/index.ts b/sdk/synapse/synapse-accesscontrol/src/models/index.ts index 8dc93686ed46..e651fb487f46 100644 --- a/sdk/synapse/synapse-accesscontrol/src/models/index.ts +++ 
b/sdk/synapse/synapse-accesscontrol/src/models/index.ts @@ -269,7 +269,8 @@ export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListRespons /** * Optional parameters. */ -export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface AccessControlClientOptionalParams + extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index d932db74be49..806ba699d86b 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -19,7 +19,7 @@ "license": "MIT", "main": "./dist/synapse-artifacts.js", "module": "./esm/index.js", - "types": "./esm/index.d.ts", + "types": "./types/synapse-artifacts.d.ts", "devDependencies": { "typescript": "~3.9.3", "rollup": "^1.16.3", @@ -54,6 +54,7 @@ "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-artifacts.js.map'\" -o ./dist/synapse-artifacts.min.js ./dist/synapse-artifacts.js", "prepack": "npm install && npm run build", + "pack": "npm pack 2>&1", "build:test": "echo skip", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md index dcbaaa3c40b0..65a431327363 100644 --- a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md +++ b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md @@ -66,7 +66,7 @@ export interface ActivityRunsQueryResponse { } // @public (undocumented) -export type ActivityUnion = ControlActivity | ExecutionActivityUnion | ExecutePipelineActivity | IfConditionActivity | SwitchActivity | ForEachActivity | WaitActivity | UntilActivity | ValidationActivity | FilterActivity | SetVariableActivity | AppendVariableActivity | 
WebHookActivity | SynapseNotebookActivity | SynapseSparkJobDefinitionActivity | SqlPoolStoredProcedureActivity; +export type ActivityUnion = ControlActivity | ExecutionActivityUnion | ExecutePipelineActivity | IfConditionActivity | SwitchActivity | ForEachActivity | WaitActivity | UntilActivity | ValidationActivity | FilterActivity | SetVariableActivity | AppendVariableActivity | WebHookActivity | SqlPoolStoredProcedureActivity; // @public export interface AddDataFlowToDebugSessionResponse { @@ -859,12 +859,12 @@ export type BlobEventsTrigger = MultiplePipelineTrigger & { blobPathBeginsWith?: string; blobPathEndsWith?: string; ignoreEmptyBlobs?: boolean; - events: BlobEventTypes[]; + events: BlobEventType[]; scope: string; }; // @public -export type BlobEventTypes = string; +export type BlobEventType = string; // @public export type BlobSink = CopySink & { @@ -1988,7 +1988,7 @@ export type ExecutionActivity = Activity & { }; // @public (undocumented) -export type ExecutionActivityUnion = CopyActivity | HDInsightHiveActivity | HDInsightPigActivity | HDInsightMapReduceActivity | HDInsightStreamingActivity | HDInsightSparkActivity | ExecuteSsisPackageActivity | CustomActivity | SqlServerStoredProcedureActivity | DeleteActivity | AzureDataExplorerCommandActivity | LookupActivity | WebActivity | GetMetadataActivity | AzureMLBatchExecutionActivity | AzureMLUpdateResourceActivity | AzureMLExecutePipelineActivity | DataLakeAnalyticsUsqlActivity | DatabricksNotebookActivity | DatabricksSparkJarActivity | DatabricksSparkPythonActivity | AzureFunctionActivity | ExecuteDataFlowActivity; +export type ExecutionActivityUnion = CopyActivity | HDInsightHiveActivity | HDInsightPigActivity | HDInsightMapReduceActivity | HDInsightStreamingActivity | HDInsightSparkActivity | ExecuteSsisPackageActivity | CustomActivity | SqlServerStoredProcedureActivity | DeleteActivity | AzureDataExplorerCommandActivity | LookupActivity | WebActivity | GetMetadataActivity | 
AzureMLBatchExecutionActivity | AzureMLUpdateResourceActivity | AzureMLExecutePipelineActivity | DataLakeAnalyticsUsqlActivity | DatabricksNotebookActivity | DatabricksSparkJarActivity | DatabricksSparkPythonActivity | AzureFunctionActivity | ExecuteDataFlowActivity | SynapseNotebookActivity | SynapseSparkJobDefinitionActivity; // @public export interface ExposureControlRequest { @@ -2807,7 +2807,7 @@ export const enum KnownBigDataPoolReferenceType { } // @public -export const enum KnownBlobEventTypes { +export const enum KnownBlobEventType { // (undocumented) MicrosoftStorageBlobCreated = "Microsoft.Storage.BlobCreated", // (undocumented) @@ -6057,7 +6057,7 @@ export type SybaseTableDataset = Dataset & { }; // @public -export type SynapseNotebookActivity = Activity & { +export type SynapseNotebookActivity = ExecutionActivity & { notebook: SynapseNotebookReference; parameters?: { [propertyName: string]: any; @@ -6071,7 +6071,7 @@ export interface SynapseNotebookReference { } // @public -export type SynapseSparkJobDefinitionActivity = Activity & { +export type SynapseSparkJobDefinitionActivity = ExecutionActivity & { sparkJob: SynapseSparkJobReference; }; @@ -6612,7 +6612,7 @@ export type ZohoSource = TabularSource & { // Warnings were encountered during analysis: // -// src/models/index.ts:15186:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts +// src/models/index.ts:15209:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts // (No @packageDocumentation comment for this package) diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts index 3c2fd2e5d6c9..0b5d4232590f 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -6,7 +6,11 @@ import { FinalStateVia, 
LROSYM } from "./models"; -import { OperationSpec, OperationArguments, OperationResponse } from "@azure/core-http"; +import { + OperationSpec, + OperationArguments, + OperationResponse +} from "@azure/core-http"; import { terminalStates } from "./constants"; import { SendOperationFn } from "."; @@ -17,11 +21,14 @@ export function createAzureAsyncOperationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); + throw new Error( + "Expected lroData to be defined for Azure-AsyncOperation strategy" + ); } let currentOperation = initialOperation; - let lastKnownPollingUrl = lroData.azureAsyncOperation || lroData.operationLocation; + let lastKnownPollingUrl = + lroData.azureAsyncOperation || lroData.operationLocation; return { isTerminal: () => { @@ -52,12 +59,17 @@ export function createAzureAsyncOperationStrategy( const initialOperationResult = initialOperation.result._response[LROSYM]; const currentOperationResult = currentOperation.result._response[LROSYM]; - if (!shouldPerformFinalGet(initialOperationResult, currentOperationResult)) { + if ( + !shouldPerformFinalGet(initialOperationResult, currentOperationResult) + ) { return currentOperation; } if (initialOperationResult?.requestMethod === "PUT") { - currentOperation = await sendFinalGet(initialOperation, sendOperationFn); + currentOperation = await sendFinalGet( + initialOperation, + sendOperationFn + ); return currentOperation; } @@ -65,20 +77,29 @@ export function createAzureAsyncOperationStrategy( if (initialOperationResult?.location) { switch (finalStateVia) { case "original-uri": - currentOperation = await sendFinalGet(initialOperation, sendOperationFn); + currentOperation = await sendFinalGet( + initialOperation, + sendOperationFn + ); return currentOperation; case "azure-async-operation": return currentOperation; case "location": default: - const location = 
initialOperationResult.location || currentOperationResult?.location; + const location = + initialOperationResult.location || + currentOperationResult?.location; if (!location) { throw new Error("Couldn't determine final GET URL from location"); } - return await sendFinalGet(initialOperation, sendOperationFn, location); + return await sendFinalGet( + initialOperation, + sendOperationFn, + location + ); } } @@ -156,7 +177,10 @@ function getCompositeMappers(responses: { }, {} as { [responseCode: string]: OperationResponse }); } -function shouldPerformFinalGet(initialResult?: LROResponseInfo, currentResult?: LROResponseInfo) { +function shouldPerformFinalGet( + initialResult?: LROResponseInfo, + currentResult?: LROResponseInfo +) { const { status } = currentResult || {}; const { requestMethod: initialRequestMethod, location } = initialResult || {}; if (status && status.toLowerCase() !== "succeeded") { diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts index 49333c25b430..62ed188e691e 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -44,7 +44,10 @@ export function createBodyPollingStrategy( }; // Execute the polling operation - initialOperation.result = await sendOperation(initialOperation.args, pollingSpec); + initialOperation.result = await sendOperation( + initialOperation.args, + pollingSpec + ); return initialOperation; } }; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts index 008f60177503..cfcfa8efd0a7 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -8,7 +8,9 @@ export function createLocationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new 
Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); + throw new Error( + "Expected lroData to be defined for Azure-AsyncOperation strategy" + ); } let currentOperation = initialOperation; @@ -49,7 +51,8 @@ export function createLocationStrategy( const result = await sendOperationFn(pollingArgs, pollingSpec); // Update latest polling url - lastKnownPollingUrl = result._response[LROSYM]?.location || lastKnownPollingUrl; + lastKnownPollingUrl = + result._response[LROSYM]?.location || lastKnownPollingUrl; // Update lastOperation result currentOperation = { diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index 1e1ec61db3b3..e686401a5cf9 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -21,8 +21,12 @@ class LROPolicy extends BaseRequestPolicy { super(nextPolicy, options); } - public async sendRequest(webResource: WebResource): Promise { - let result: LROOperationResponse = await this._nextPolicy.sendRequest(webResource); + public async sendRequest( + webResource: WebResource + ): Promise { + let result: LROOperationResponse = await this._nextPolicy.sendRequest( + webResource + ); const _lroData = getLROData(result); result[LROSYM] = _lroData; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts index 9ab5f25fb49a..72d979bd46a9 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -1,6 +1,17 @@ import { Poller } from "@azure/core-lro"; -import { OperationSpec, OperationArguments, delay, RestError } from "@azure/core-http"; -import { BaseResult, LROOperationState, LROOperationStep, FinalStateVia, LROSYM } from "./models"; +import { + OperationSpec, + OperationArguments, + delay, + RestError +} from "@azure/core-http"; +import { + BaseResult, + LROOperationState, + 
LROOperationStep, + FinalStateVia, + LROSYM +} from "./models"; import { makeOperation } from "./operation"; import { createBodyPollingStrategy } from "./bodyPollingStrategy"; import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; @@ -59,7 +70,11 @@ export class LROPoller extends Poller< result: initialOperationResult }; - const pollingStrategy = getPollingStrategy(initialOperation, sendOperation, finalStateVia); + const pollingStrategy = getPollingStrategy( + initialOperation, + sendOperation, + finalStateVia + ); const state: LROOperationState = { // Initial operation will become the last operation @@ -112,7 +127,11 @@ function getPollingStrategy( } if (lroData.azureAsyncOperation || lroData.operationLocation) { - return createAzureAsyncOperationStrategy(initialOperation, sendOperationFn, finalStateVia); + return createAzureAsyncOperationStrategy( + initialOperation, + sendOperationFn, + finalStateVia + ); } if (lroData.location) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts index 704c731cf370..ed0dd9132876 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/models.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -8,7 +8,10 @@ import { import { PollOperationState, PollOperation } from "@azure/core-lro"; export const LROSYM = Symbol("LROData"); -export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; +export type FinalStateVia = + | "azure-async-operation" + | "location" + | "original-uri"; export interface LROResponseInfo { requestMethod: HttpMethods; @@ -44,7 +47,8 @@ export interface LROOperationStep { result: TResult; } -export interface LROOperationState extends PollOperationState { +export interface LROOperationState + extends PollOperationState { lastOperation: LROOperationStep; initialOperation: LROOperationStep; pollingStrategy: LROStrategy; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts 
b/sdk/synapse/synapse-artifacts/src/lro/operation.ts index e1e3fa2e1126..9b37277e7b2a 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/operation.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -41,7 +41,9 @@ async function update( const currentLroData = currentResponse.result._response[LROSYM]; if (!currentLroData) { - throw new Error("Expected lroData to be defined for updating LRO operation"); + throw new Error( + "Expected lroData to be defined for updating LRO operation" + ); } if (state.result) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts index 3c518804edec..e3289b95905b 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -21,10 +21,17 @@ export function shouldDeserializeLRO(finalStateVia?: string) { isInitialRequest = false; } - if (initialOperationInfo.azureAsyncOperation || initialOperationInfo.operationLocation) { + if ( + initialOperationInfo.azureAsyncOperation || + initialOperationInfo.operationLocation + ) { return ( !isInitialRequest && - isAsyncOperationFinalResponse(response, initialOperationInfo, finalStateVia) + isAsyncOperationFinalResponse( + response, + initialOperationInfo, + finalStateVia + ) ); } @@ -62,7 +69,10 @@ function isAsyncOperationFinalResponse( return true; } - if (initialOperationInfo.requestMethod !== "PUT" && !initialOperationInfo.location) { + if ( + initialOperationInfo.requestMethod !== "PUT" && + !initialOperationInfo.location + ) { return true; } diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index 5389dc744b1e..4bcfe16ee83c 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -192,8 +192,6 @@ export type ActivityUnion = | SetVariableActivity | AppendVariableActivity | WebHookActivity - | SynapseNotebookActivity - | 
SynapseSparkJobDefinitionActivity | SqlPoolStoredProcedureActivity; export type TriggerUnion = | RerunTumblingWindowTrigger @@ -201,7 +199,9 @@ export type TriggerUnion = | TumblingWindowTrigger | ChainingTrigger; export type DataFlowUnion = MappingDataFlow; -export type IntegrationRuntimeUnion = ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; +export type IntegrationRuntimeUnion = + | ManagedIntegrationRuntime + | SelfHostedIntegrationRuntime; export type SecretBaseUnion = SecureString | AzureKeyVaultSecretReference; export type DatasetLocationUnion = | AzureBlobStorageLocation @@ -349,8 +349,13 @@ export type ExecutionActivityUnion = | DatabricksSparkJarActivity | DatabricksSparkPythonActivity | AzureFunctionActivity - | ExecuteDataFlowActivity; -export type MultiplePipelineTriggerUnion = ScheduleTrigger | BlobTrigger | BlobEventsTrigger; + | ExecuteDataFlowActivity + | SynapseNotebookActivity + | SynapseSparkJobDefinitionActivity; +export type MultiplePipelineTriggerUnion = + | ScheduleTrigger + | BlobTrigger + | BlobEventsTrigger; export type TabularSourceUnion = | AzureTableSource | InformixSource @@ -3129,7 +3134,12 @@ export interface DatasetStorageFormat { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; + type: + | "TextFormat" + | "JsonFormat" + | "AvroFormat" + | "OrcFormat" + | "ParquetFormat"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -3319,7 +3329,10 @@ export interface FormatWriteSettings { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; + type: + | "AvroWriteSettings" + | "DelimitedTextWriteSettings" + | "JsonWriteSettings"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. 
*/ @@ -8849,30 +8862,6 @@ export type WebHookActivity = Activity & { reportStatusOnCallBack?: any; }; -/** - * Execute Synapse notebook activity. - */ -export type SynapseNotebookActivity = Activity & { - /** - * Synapse notebook reference. - */ - notebook: SynapseNotebookReference; - /** - * Notebook parameters. - */ - parameters?: { [propertyName: string]: any }; -}; - -/** - * Execute spark job activity. - */ -export type SynapseSparkJobDefinitionActivity = Activity & { - /** - * Synapse spark job reference. - */ - sparkJob: SynapseSparkJobReference; -}; - /** * Execute SQL pool stored procedure activity. */ @@ -11757,6 +11746,30 @@ export type ExecuteDataFlowActivity = ExecutionActivity & { compute?: ExecuteDataFlowActivityTypePropertiesCompute; }; +/** + * Execute Synapse notebook activity. + */ +export type SynapseNotebookActivity = ExecutionActivity & { + /** + * Synapse notebook reference. + */ + notebook: SynapseNotebookReference; + /** + * Notebook parameters. + */ + parameters?: { [propertyName: string]: any }; +}; + +/** + * Execute spark job activity. + */ +export type SynapseSparkJobDefinitionActivity = ExecutionActivity & { + /** + * Synapse spark job reference. + */ + sparkJob: SynapseSparkJobReference; +}; + /** * Trigger that creates pipeline runs periodically, on schedule. */ @@ -11804,7 +11817,7 @@ export type BlobEventsTrigger = MultiplePipelineTrigger & { /** * The type of events that cause this trigger to fire. */ - events: BlobEventTypes[]; + events: BlobEventType[]; /** * The ARM resource ID of the Storage Account. */ @@ -14340,22 +14353,22 @@ export const enum KnownRecurrenceFrequency { export type RecurrenceFrequency = string; /** - * Known values of {@link BlobEventTypes} that the service accepts. + * Known values of {@link BlobEventType} that the service accepts. 
*/ -export const enum KnownBlobEventTypes { +export const enum KnownBlobEventType { MicrosoftStorageBlobCreated = "Microsoft.Storage.BlobCreated", MicrosoftStorageBlobDeleted = "Microsoft.Storage.BlobDeleted" } /** - * Defines values for BlobEventTypes. \ - * {@link KnownBlobEventTypes} can be used interchangeably with BlobEventTypes, + * Defines values for BlobEventType. \ + * {@link KnownBlobEventType} can be used interchangeably with BlobEventType, * this enum contains the known values that the service supports. * ### Know values supported by the service * **Microsoft.Storage.BlobCreated** \ * **Microsoft.Storage.BlobDeleted** */ -export type BlobEventTypes = string; +export type BlobEventType = string; /** * Known values of {@link TumblingWindowFrequency} that the service accepts. @@ -14594,7 +14607,8 @@ export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceReso /** * Optional parameters. */ -export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { +export interface LinkedServiceGetLinkedServiceOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14664,7 +14678,8 @@ export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { /** * Optional parameters. */ -export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { +export interface DatasetCreateOrUpdateDatasetOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -14698,7 +14713,8 @@ export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { /** * Optional parameters. 
*/ -export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { +export interface DatasetGetDatasetOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14768,7 +14784,8 @@ export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { /** * Optional parameters. */ -export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { +export interface PipelineCreateOrUpdatePipelineOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -14802,7 +14819,8 @@ export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { +export interface PipelineGetPipelineOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14832,7 +14850,8 @@ export type PipelineGetPipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { +export interface PipelineCreatePipelineRunOptionalParams + extends coreHttp.OperationOptions { /** * Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */ @@ -14954,7 +14973,8 @@ export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { /** * Optional parameters. 
*/ -export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { +export interface PipelineRunCancelPipelineRunOptionalParams + extends coreHttp.OperationOptions { /** * If true, cancel all the Child pipelines that are triggered by the current pipeline. */ @@ -14984,7 +15004,8 @@ export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { /** * Optional parameters. */ -export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { +export interface TriggerCreateOrUpdateTriggerOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15018,7 +15039,8 @@ export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { /** * Optional parameters. */ -export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { +export interface TriggerGetTriggerOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15156,7 +15178,8 @@ export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResp /** * Optional parameters. */ -export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { +export interface DataFlowCreateOrUpdateDataFlowOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15190,7 +15213,8 @@ export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { /** * Optional parameters. 
*/ -export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { +export interface DataFlowGetDataFlowOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15388,7 +15412,8 @@ export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & /** * Optional parameters. */ -export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15418,7 +15443,8 @@ export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { /** * Optional parameters. */ -export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { +export interface SqlScriptGetSqlScriptOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15658,7 +15684,8 @@ export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse /** * Optional parameters. */ -export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { +export interface NotebookCreateOrUpdateNotebookOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the Note book entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15692,7 +15719,8 @@ export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { /** * Optional parameters. 
*/ -export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { +export interface NotebookGetNotebookOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15933,7 +15961,8 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces /** * Optional parameters. */ -export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface ArtifactsClientOptionalParams + extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index 8132ad51a281..424331700c33 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -14065,55 +14065,6 @@ export const WebHookActivity: coreHttp.CompositeMapper = { } }; -export const SynapseNotebookActivity: coreHttp.CompositeMapper = { - serializedName: "SynapseNotebook", - type: { - name: "Composite", - className: "SynapseNotebookActivity", - uberParent: "Activity", - additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, - modelProperties: { - ...Activity.type.modelProperties, - notebook: { - serializedName: "typeProperties.notebook", - type: { - name: "Composite", - className: "SynapseNotebookReference" - } - }, - parameters: { - serializedName: "typeProperties.parameters", - type: { - name: "Dictionary", - value: { type: { name: "any" } } - } - } - } - } -}; - -export const SynapseSparkJobDefinitionActivity: coreHttp.CompositeMapper = { - serializedName: "SparkJob", - type: { - name: "Composite", - className: "SynapseSparkJobDefinitionActivity", - uberParent: "Activity", - additionalProperties: { type: { name: "Object" } }, - 
polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, - modelProperties: { - ...Activity.type.modelProperties, - sparkJob: { - serializedName: "typeProperties.sparkJob", - type: { - name: "Composite", - className: "SynapseSparkJobReference" - } - } - } - } -}; - export const SqlPoolStoredProcedureActivity: coreHttp.CompositeMapper = { serializedName: "SqlPoolStoredProcedure", type: { @@ -14802,7 +14753,8 @@ export const TextFormat: coreHttp.CompositeMapper = { className: "TextFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, columnDelimiter: { @@ -14870,7 +14822,8 @@ export const JsonFormat: coreHttp.CompositeMapper = { className: "JsonFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, filePattern: { @@ -14914,7 +14867,8 @@ export const AvroFormat: coreHttp.CompositeMapper = { className: "AvroFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14928,7 +14882,8 @@ export const OrcFormat: coreHttp.CompositeMapper = { className: "OrcFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + 
DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14942,7 +14897,8 @@ export const ParquetFormat: coreHttp.CompositeMapper = { className: "ParquetFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -15029,7 +14985,8 @@ export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebAnonymousAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties } @@ -15042,7 +14999,8 @@ export const WebBasicAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebBasicAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, username: { @@ -15069,7 +15027,8 @@ export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebClientCertificateAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, pfx: { @@ -17683,7 +17642,9 @@ export const 
SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap ...DependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -17695,7 +17656,9 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap }, size: { constraints: { - Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -17714,7 +17677,8 @@ export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeKeyAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: + LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, key: { @@ -17734,7 +17698,8 @@ export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeRbacAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: + LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, resourceId: { @@ -19573,6 +19538,55 @@ export const ExecuteDataFlowActivity: coreHttp.CompositeMapper = { } }; +export const SynapseNotebookActivity: coreHttp.CompositeMapper = { + serializedName: "SynapseNotebook", + type: { + name: "Composite", + className: "SynapseNotebookActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + notebook: { + serializedName: "typeProperties.notebook", + type: { + name: "Composite", + className: "SynapseNotebookReference" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const SynapseSparkJobDefinitionActivity: coreHttp.CompositeMapper = { + serializedName: "SparkJob", + type: { + name: "Composite", + className: "SynapseSparkJobDefinitionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + sparkJob: { + serializedName: "typeProperties.sparkJob", + type: { + name: "Composite", + className: "SynapseSparkJobReference" + } + } + } + } +}; + export const ScheduleTrigger: coreHttp.CompositeMapper = { serializedName: "ScheduleTrigger", type: { @@ -21040,7 +21054,9 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper ...TriggerDependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -21051,7 +21067,9 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper }, size: { constraints: { - Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -21304,8 +21322,6 @@ export let discriminators = { "Activity.SetVariable": SetVariableActivity, "Activity.AppendVariable": AppendVariableActivity, "Activity.WebHook": WebHookActivity, - "Activity.SynapseNotebook": SynapseNotebookActivity, - 
"Activity.SparkJob": SynapseSparkJobDefinitionActivity, "Activity.SqlPoolStoredProcedure": SqlPoolStoredProcedureActivity, "Trigger.RerunTumblingWindowTrigger": RerunTumblingWindowTrigger, "Trigger.MultiplePipelineTrigger": MultiplePipelineTrigger, @@ -21451,6 +21467,8 @@ export let discriminators = { "Activity.DatabricksSparkPython": DatabricksSparkPythonActivity, "Activity.AzureFunctionActivity": AzureFunctionActivity, "Activity.ExecuteDataFlow": ExecuteDataFlowActivity, + "Activity.SynapseNotebook": SynapseNotebookActivity, + "Activity.SparkJob": SynapseSparkJobDefinitionActivity, "Trigger.ScheduleTrigger": ScheduleTrigger, "Trigger.BlobTrigger": BlobTrigger, "Trigger.BlobEventsTrigger": BlobEventsTrigger, diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index 3eab86fcd719..977767e2578f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -26,9 +26,10 @@ export class BigDataPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< - BigDataPoolsListResponse - >; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; } /** @@ -44,9 +45,10 @@ export class BigDataPools { bigDataPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< - BigDataPoolsGetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index 010e6f89c007..3dffca96470b 
100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -57,7 +57,10 @@ export class DataFlow { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDataFlowsByWorkspaceNext(continuationToken, options); + result = await this._getDataFlowsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -87,7 +90,10 @@ export class DataFlow { dataFlow, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowCreateOrUpdateDataFlowResponse >; @@ -137,8 +143,13 @@ export class DataFlow { dataFlowName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -169,8 +180,13 @@ export class DataFlow { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -260,7 +276,11 @@ 
const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataFlow, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index f84fba997bfa..f0e17e80dafd 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -59,7 +59,10 @@ export class DataFlowDebugSession { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._queryDataFlowDebugSessionsByWorkspaceNext(continuationToken, options); + result = await this._queryDataFlowDebugSessionsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -68,7 +71,9 @@ export class DataFlowDebugSession { private async *queryDataFlowDebugSessionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage(options)) { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage( + options + )) { yield* page; } } @@ -81,12 +86,17 @@ export class DataFlowDebugSession { async createDataFlowDebugSession( request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions - ): Promise> { + ): Promise< + LROPoller + > { const operationArguments: coreHttp.OperationArguments = { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => 
{ + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowDebugSessionCreateDataFlowDebugSessionResponse >; @@ -110,14 +120,18 @@ export class DataFlowDebugSession { */ private _queryDataFlowDebugSessionsByWorkspace( options?: coreHttp.OperationOptions - ): Promise { + ): Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse + > { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest( operationArguments, queryDataFlowDebugSessionsByWorkspaceOperationSpec - ) as Promise; + ) as Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse + >; } /** @@ -171,7 +185,10 @@ export class DataFlowDebugSession { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowDebugSessionExecuteCommandResponse >; @@ -198,7 +215,9 @@ export class DataFlowDebugSession { private _queryDataFlowDebugSessionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions - ): Promise { + ): Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse + > { const operationArguments: coreHttp.OperationArguments = { nextLink, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -206,7 +225,9 @@ export class DataFlowDebugSession { return this.client.sendOperationRequest( operationArguments, queryDataFlowDebugSessionsByWorkspaceNextOperationSpec - ) as Promise; + ) as Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse + >; } private getOperationOptions( diff --git 
a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 294e0e457bd6..0cef4925e7c0 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -57,7 +57,10 @@ export class Dataset { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDatasetsByWorkspaceNext(continuationToken, options); + result = await this._getDatasetsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -103,7 +106,10 @@ export class Dataset { dataset, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< DatasetCreateOrUpdateDatasetResponse >; @@ -134,9 +140,10 @@ export class Dataset { datasetName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getDatasetOperationSpec) as Promise< - DatasetGetDatasetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getDatasetOperationSpec + ) as Promise; } /** @@ -152,8 +159,13 @@ export class Dataset { datasetName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -184,8 +196,13 @@ export 
class Dataset { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -274,7 +291,11 @@ const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataset, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.datasetName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index 9a8962a761fc..4736ccb3746a 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -2,7 +2,10 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { IntegrationRuntimesListResponse, IntegrationRuntimesGetResponse } from "../models"; +import { + IntegrationRuntimesListResponse, + IntegrationRuntimesGetResponse +} from "../models"; /** * Class representing a IntegrationRuntimes. @@ -22,13 +25,16 @@ export class IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. 
*/ - list(options?: coreHttp.OperationOptions): Promise { + list( + options?: coreHttp.OperationOptions + ): Promise { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< - IntegrationRuntimesListResponse - >; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; } /** @@ -44,9 +50,10 @@ export class IntegrationRuntimes { integrationRuntimeName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< - IntegrationRuntimesGetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index 644f8913310e..35436c92ba04 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -57,7 +57,10 @@ export class LinkedService { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getLinkedServicesByWorkspaceNext(continuationToken, options); + result = await this._getLinkedServicesByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -66,7 +69,9 @@ export class LinkedService { private async *getLinkedServicesByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getLinkedServicesByWorkspacePagingPage(options)) { + for await (const page of this.getLinkedServicesByWorkspacePagingPage( + options + )) { yield* page; } } @@ -103,7 +108,10 @@ export class 
LinkedService { linkedService, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< LinkedServiceCreateOrUpdateLinkedServiceResponse >; @@ -153,8 +161,13 @@ export class LinkedService { linkedServiceName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -185,8 +198,13 @@ export class LinkedService { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -276,7 +294,11 @@ const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.linkedService, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git 
a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index 6bc39f71c116..df003d827a91 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -59,7 +59,10 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebooksByWorkspaceNext(continuationToken, options); + result = await this._getNotebooksByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -101,7 +104,10 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebookSummaryByWorkSpaceNext(continuationToken, options); + result = await this._getNotebookSummaryByWorkSpaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -110,7 +116,9 @@ export class Notebook { private async *getNotebookSummaryByWorkSpacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getNotebookSummaryByWorkSpacePagingPage(options)) { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( + options + )) { yield* page; } } @@ -163,7 +171,10 @@ export class Notebook { notebook, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< NotebookCreateOrUpdateNotebookResponse >; @@ -213,8 +224,13 @@ export class Notebook { notebookName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: 
coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -245,8 +261,13 @@ export class Notebook { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -372,7 +393,11 @@ const createOrUpdateNotebookOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.notebook, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.notebookName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index b481a9d165eb..87c49bd6c9ae 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -59,7 +59,10 @@ export class Pipeline { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getPipelinesByWorkspaceNext(continuationToken, options); + result = await this._getPipelinesByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -105,7 
+108,10 @@ export class Pipeline { pipeline, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< PipelineCreateOrUpdatePipelineResponse >; @@ -155,8 +161,13 @@ export class Pipeline { pipelineName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -187,8 +198,13 @@ export class Pipeline { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -297,7 +313,11 @@ const createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.pipeline, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.pipelineName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts 
b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index 240337eb9067..9c2c3370e5cc 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -157,7 +157,11 @@ const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.filterParameters, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.pipelineName, Parameters.runId], + urlParameters: [ + Parameters.endpoint, + Parameters.pipelineName, + Parameters.runId + ], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index b7510b8e133f..0604bdeeaf1b 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -59,7 +59,10 @@ export class SparkJobDefinition { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options); + result = await this._getSparkJobDefinitionsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -68,7 +71,9 @@ export class SparkJobDefinition { private async *getSparkJobDefinitionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage(options)) { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( + options + )) { yield* page; } } @@ -162,7 +167,10 @@ export class SparkJobDefinition { sparkJobDefinitionName, options: this.getOperationOptions(options, "location") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: 
coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionExecuteSparkJobDefinitionResponse >; @@ -197,8 +205,13 @@ export class SparkJobDefinition { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -226,7 +239,10 @@ export class SparkJobDefinition { sparkJobDefinitionAzureResource, options: this.getOperationOptions(options, "location") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionDebugSparkJobDefinitionResponse >; @@ -262,7 +278,9 @@ export class SparkJobDefinition { return this.client.sendOperationRequest( operationArguments, getSparkJobDefinitionsByWorkspaceNextOperationSpec - ) as Promise; + ) as Promise< + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse + >; } private getOperationOptions( @@ -311,7 +329,11 @@ const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sparkJobDefinition, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: 
"json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index 375608469931..dc58292e2873 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -26,9 +26,10 @@ export class SqlPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< - SqlPoolsListResponse - >; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; } /** @@ -36,14 +37,18 @@ export class SqlPools { * @param sqlPoolName The Sql Pool name * @param options The options parameters. */ - get(sqlPoolName: string, options?: coreHttp.OperationOptions): Promise { + get( + sqlPoolName: string, + options?: coreHttp.OperationOptions + ): Promise { const operationArguments: coreHttp.OperationArguments = { sqlPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< - SqlPoolsGetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index 8db16a32af20..d3e08c1f3ea1 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -57,7 +57,10 @@ export class SqlScript { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSqlScriptsByWorkspaceNext(continuationToken, options); + result = await this._getSqlScriptsByWorkspaceNext( + continuationToken, + 
options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -163,8 +166,13 @@ export class SqlScript { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -245,7 +253,11 @@ const createOrUpdateSqlScriptOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sqlScript, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index c9ae4ffdeb84..4f333b059259 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -59,7 +59,10 @@ export class Trigger { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getTriggersByWorkspaceNext(continuationToken, options); + result = await this._getTriggersByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -105,7 +108,10 @@ export class Trigger { trigger, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: 
coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerCreateOrUpdateTriggerResponse >; @@ -136,9 +142,10 @@ export class Trigger { triggerName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getTriggerOperationSpec) as Promise< - TriggerGetTriggerResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getTriggerOperationSpec + ) as Promise; } /** @@ -154,8 +161,13 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -183,7 +195,10 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerSubscribeTriggerToEventsResponse >; @@ -233,7 +248,10 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerUnsubscribeTriggerFromEventsResponse >; @@ -264,8 +282,13 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") 
}; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -293,8 +316,13 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -383,7 +411,11 @@ const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.trigger, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.triggerName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index f64442937aa7..20e37cfac269 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -2,7 +2,10 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from 
"../models"; +import { + RunFilterParameters, + TriggerRunQueryTriggerRunsByWorkspaceResponse +} from "../models"; /** * Class representing a TriggerRun. @@ -95,7 +98,11 @@ const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], + urlParameters: [ + Parameters.endpoint, + Parameters.runId, + Parameters.triggerName + ], headerParameters: [Parameters.accept], serializer }; @@ -109,7 +116,11 @@ const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], + urlParameters: [ + Parameters.endpoint, + Parameters.runId, + Parameters.triggerName + ], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index 9fc379060c1e..4c4453399fb4 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -26,9 +26,10 @@ export class Workspace { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< - WorkspaceGetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 550bd2db8014..058888c80532 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -56,7 +56,11 @@ const 
getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.gitHubAccessTokenRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.clientRequestId], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.clientRequestId + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json index 297bf998928d..0f51ad97609c 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -18,7 +18,7 @@ "license": "MIT", "main": "./dist/synapse-managed-private-endpoints.js", "module": "./esm/index.js", - "types": "./esm/index.d.ts", + "types": "./types/synapse-managed-private-endpoints.d.ts", "devDependencies": { "typescript": "~3.9.3", "rollup": "^1.16.3", @@ -54,6 +54,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-private-endpoints.js.map'\" -o ./dist/synapse-managed-private-endpoints.min.js ./dist/synapse-managed-private-endpoints.js", "prepack": "npm install && npm run build", "build:test": "echo skip", + "pack": "npm pack 2>&1", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts index cae6cb3ab10e..c8a5ef476bf7 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts @@ -170,7 +170,8 @@ export type ManagedPrivateEndpointsListNextResponse = ManagedPrivateEndpointList /** * Optional parameters. 
*/ -export interface ManagedPrivateEndpointsClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface ManagedPrivateEndpointsClientOptionalParams + extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts index 123c22c9a279..d354c10f3bf2 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts @@ -56,7 +56,11 @@ export class ManagedPrivateEndpoints { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._listNext(managedVirtualNetworkName, continuationToken, options); + result = await this._listNext( + managedVirtualNetworkName, + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -66,7 +70,10 @@ export class ManagedPrivateEndpoints { managedVirtualNetworkName: string, options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.listPagingPage(managedVirtualNetworkName, options)) { + for await (const page of this.listPagingPage( + managedVirtualNetworkName, + options + )) { yield* page; } } @@ -87,9 +94,10 @@ export class ManagedPrivateEndpoints { managedPrivateEndpointName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< - ManagedPrivateEndpointsGetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; } /** @@ -111,9 +119,10 @@ export class ManagedPrivateEndpoints { managedPrivateEndpoint, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return 
this.client.sendOperationRequest(operationArguments, createOperationSpec) as Promise< - ManagedPrivateEndpointsCreateResponse - >; + return this.client.sendOperationRequest( + operationArguments, + createOperationSpec + ) as Promise; } /** @@ -132,9 +141,10 @@ export class ManagedPrivateEndpoints { managedPrivateEndpointName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec) as Promise< - coreHttp.RestResponse - >; + return this.client.sendOperationRequest( + operationArguments, + deleteOperationSpec + ) as Promise; } /** @@ -150,9 +160,10 @@ export class ManagedPrivateEndpoints { managedVirtualNetworkName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< - ManagedPrivateEndpointsListResponse - >; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; } /** @@ -171,9 +182,10 @@ export class ManagedPrivateEndpoints { nextLink, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listNextOperationSpec) as Promise< - ManagedPrivateEndpointsListNextResponse - >; + return this.client.sendOperationRequest( + operationArguments, + listNextOperationSpec + ) as Promise; } } // Operation Specifications @@ -232,7 +244,8 @@ const deleteOperationSpec: coreHttp.OperationSpec = { serializer }; const listOperationSpec: coreHttp.OperationSpec = { - path: "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", + path: + "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", httpMethod: "GET", responses: { 200: { @@ -253,7 +266,11 @@ const listNextOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, 
Parameters.managedVirtualNetworkName, Parameters.nextLink], + urlParameters: [ + Parameters.endpoint, + Parameters.managedVirtualNetworkName, + Parameters.nextLink + ], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index a16d918b8707..afd51d04f80a 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -17,7 +17,7 @@ "license": "MIT", "main": "./dist/synapse-monitoring.js", "module": "./esm/index.js", - "types": "./esm/index.d.ts", + "types": "./types/synapse-monitoring.d.ts", "devDependencies": { "typescript": "~3.9.3", "rollup": "^1.16.3", @@ -52,6 +52,7 @@ "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-monitoring.js.map'\" -o ./dist/synapse-monitoring.min.js ./dist/synapse-monitoring.js", "prepack": "npm install && npm run build", + "pack": "npm pack 2>&1", "build:test": "echo skip", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", diff --git a/sdk/synapse/synapse-monitoring/src/models/index.ts b/sdk/synapse/synapse-monitoring/src/models/index.ts index 9d8c26fa69ed..4d28685ede7a 100644 --- a/sdk/synapse/synapse-monitoring/src/models/index.ts +++ b/sdk/synapse/synapse-monitoring/src/models/index.ts @@ -30,7 +30,8 @@ export interface SqlQueryStringDataModel { /** * Optional parameters. */ -export interface MonitoringGetSparkJobListOptionalParams extends coreHttp.OperationOptions { +export interface MonitoringGetSparkJobListOptionalParams + extends coreHttp.OperationOptions { /** * Can provide a guid, which is helpful for debugging and to provide better customer support */ @@ -60,7 +61,8 @@ export type MonitoringGetSparkJobListResponse = SparkJobListViewResponse & { /** * Optional parameters. 
*/ -export interface MonitoringGetSqlJobQueryStringOptionalParams extends coreHttp.OperationOptions { +export interface MonitoringGetSqlJobQueryStringOptionalParams + extends coreHttp.OperationOptions { /** * Can provide a guid, which is helpful for debugging and to provide better customer support */ @@ -93,7 +95,8 @@ export type MonitoringGetSqlJobQueryStringResponse = SqlQueryStringDataModel & { /** * Optional parameters. */ -export interface MonitoringClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface MonitoringClientOptionalParams + extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts index 9c82f427c63b..53f2223463d5 100644 --- a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts +++ b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts @@ -80,7 +80,12 @@ const getSqlJobQueryStringOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.SqlQueryStringDataModel } }, - queryParameters: [Parameters.apiVersion, Parameters.filter, Parameters.orderby, Parameters.skip], + queryParameters: [ + Parameters.apiVersion, + Parameters.filter, + Parameters.orderby, + Parameters.skip + ], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.xMsClientRequestId], serializer diff --git a/sdk/synapse/synapse-spark/api-extractor.json b/sdk/synapse/synapse-spark/api-extractor.json index bee41e4ee6ab..048b504f92e3 100644 --- a/sdk/synapse/synapse-spark/api-extractor.json +++ b/sdk/synapse/synapse-spark/api-extractor.json @@ -6,7 +6,7 @@ "dtsRollup": { "enabled": true, "untrimmedFilePath": "", - "publicTrimmedFilePath": "./types/synapse-monitoring.d.ts" + "publicTrimmedFilePath": "./types/synapse-spark.d.ts" }, "messages": { "tsdocMessageReporting": { "default": { "logLevel": "none" } }, diff --git a/sdk/synapse/synapse-spark/package.json 
b/sdk/synapse/synapse-spark/package.json index 0e33a1c65dfc..0b0953081a09 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -17,7 +17,7 @@ "license": "MIT", "main": "./dist/synapse-spark.js", "module": "./esm/index.js", - "types": "./esm/index.d.ts", + "types": "./types/synapse-spark.d.ts", "devDependencies": { "typescript": "~3.9.3", "rollup": "^1.16.3", @@ -52,6 +52,7 @@ "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-spark.js.map'\" -o ./dist/synapse-spark.min.js ./dist/synapse-spark.js", "prepack": "npm install && npm run build", + "pack": "npm pack 2>&1", "build:test": "echo skip", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", diff --git a/sdk/synapse/synapse-spark/src/models/index.ts b/sdk/synapse/synapse-spark/src/models/index.ts index 8f7ea475c8cf..c388e24dc015 100644 --- a/sdk/synapse/synapse-spark/src/models/index.ts +++ b/sdk/synapse/synapse-spark/src/models/index.ts @@ -465,7 +465,8 @@ export type SparkStatementLanguageType = string; /** * Optional parameters. */ -export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobsOptionalParams + extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -504,7 +505,8 @@ export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { /** * Optional parameters. */ -export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { +export interface SparkBatchCreateSparkBatchJobOptionalParams + extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -534,7 +536,8 @@ export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. 
*/ -export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobOptionalParams + extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -564,7 +567,8 @@ export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionsOptionalParams + extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -603,7 +607,8 @@ export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { /** * Optional parameters. */ -export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { +export interface SparkSessionCreateSparkSessionOptionalParams + extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -633,7 +638,8 @@ export type SparkSessionCreateSparkSessionResponse = SparkSession & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionOptionalParams + extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -743,7 +749,8 @@ export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellatio /** * Optional parameters. */ -export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface SparkClientOptionalParams + extends coreHttp.ServiceClientOptions { /** * Valid api-version for the request. 
*/ diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts index 0a8b12b48eae..d9d36b0a8b52 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts @@ -112,7 +112,11 @@ const getSparkBatchJobsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], headerParameters: [Parameters.accept], serializer }; @@ -126,7 +130,11 @@ const createSparkBatchJobOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.sparkBatchJobOptions, queryParameters: [Parameters.detailed], - urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts index 60decb5b9384..844d4d4331ac 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts @@ -221,7 +221,11 @@ const getSparkSessionsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], headerParameters: [Parameters.accept], serializer }; @@ -235,7 +239,11 @@ const createSparkSessionOperationSpec: coreHttp.OperationSpec = { }, requestBody: 
Parameters.sparkSessionOptions, queryParameters: [Parameters.detailed], - urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/sparkClientContext.ts b/sdk/synapse/synapse-spark/src/sparkClientContext.ts index 0c01469a88a2..52587e47d3ef 100644 --- a/sdk/synapse/synapse-spark/src/sparkClientContext.ts +++ b/sdk/synapse/synapse-spark/src/sparkClientContext.ts @@ -48,7 +48,8 @@ export class SparkClientContext extends coreHttp.ServiceClient { this.requestContentType = "application/json; charset=utf-8"; this.baseUri = - options.endpoint || "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; + options.endpoint || + "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; // Parameter assignments this.endpoint = endpoint; From 954d314b3cd049ef65ee70d80bd09274b897ee8a Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 2 Dec 2020 08:41:43 +1300 Subject: [PATCH 13/28] Rerun swagger codegen --- .../src/accessControlClient.ts | 6 +- .../synapse-accesscontrol/src/models/index.ts | 3 +- .../src/lro/azureAsyncOperationStrategy.ts | 42 +++-------- .../src/lro/bodyPollingStrategy.ts | 5 +- .../src/lro/locationStrategy.ts | 7 +- .../synapse-artifacts/src/lro/lroPolicy.ts | 8 +-- .../synapse-artifacts/src/lro/lroPoller.ts | 27 ++------ .../synapse-artifacts/src/lro/models.ts | 8 +-- .../synapse-artifacts/src/lro/operation.ts | 4 +- .../synapse-artifacts/src/lro/requestUtils.ts | 16 +---- .../synapse-artifacts/src/models/index.ts | 69 ++++++------------- .../synapse-artifacts/src/models/mappers.ts | 46 ++++--------- .../src/operations/bigDataPools.ts | 14 ++-- .../src/operations/dataFlow.ts | 34 ++------- .../src/operations/dataFlowDebugSession.ts | 39 +++-------- 
.../src/operations/dataset.ts | 41 +++-------- .../src/operations/integrationRuntimes.ts | 23 +++---- .../src/operations/linkedService.ts | 38 +++------- .../src/operations/notebook.ts | 43 +++--------- .../src/operations/pipeline.ts | 34 ++------- .../src/operations/pipelineRun.ts | 6 +- .../src/operations/sparkJobDefinition.ts | 38 +++------- .../src/operations/sqlPools.ts | 19 ++--- .../src/operations/sqlScript.ts | 20 ++---- .../src/operations/trigger.ts | 60 ++++------------ .../src/operations/triggerRun.ts | 17 +---- .../src/operations/workspace.ts | 7 +- .../operations/workspaceGitRepoManagement.ts | 6 +- .../src/models/index.ts | 3 +- .../src/operations/managedPrivateEndpoints.ts | 55 +++++---------- .../synapse-monitoring/src/models/index.ts | 9 +-- .../src/operations/monitoring.ts | 7 +- sdk/synapse/synapse-spark/src/models/index.ts | 21 ++---- .../src/operations/sparkBatch.ts | 12 +--- .../src/operations/sparkSession.ts | 12 +--- .../synapse-spark/src/sparkClientContext.ts | 3 +- 36 files changed, 201 insertions(+), 601 deletions(-) diff --git a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts index 0a7ff0c8a2e3..61aeccd43e68 100644 --- a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts +++ b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts @@ -291,11 +291,7 @@ const getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [ - Parameters.apiVersion, - Parameters.roleId1, - Parameters.principalId - ], + queryParameters: [Parameters.apiVersion, Parameters.roleId1, Parameters.principalId], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.continuationToken], serializer diff --git a/sdk/synapse/synapse-accesscontrol/src/models/index.ts b/sdk/synapse/synapse-accesscontrol/src/models/index.ts index e651fb487f46..8dc93686ed46 100644 --- 
a/sdk/synapse/synapse-accesscontrol/src/models/index.ts +++ b/sdk/synapse/synapse-accesscontrol/src/models/index.ts @@ -269,8 +269,7 @@ export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListRespons /** * Optional parameters. */ -export interface AccessControlClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts index 0b5d4232590f..3c2fd2e5d6c9 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -6,11 +6,7 @@ import { FinalStateVia, LROSYM } from "./models"; -import { - OperationSpec, - OperationArguments, - OperationResponse -} from "@azure/core-http"; +import { OperationSpec, OperationArguments, OperationResponse } from "@azure/core-http"; import { terminalStates } from "./constants"; import { SendOperationFn } from "."; @@ -21,14 +17,11 @@ export function createAzureAsyncOperationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; - let lastKnownPollingUrl = - lroData.azureAsyncOperation || lroData.operationLocation; + let lastKnownPollingUrl = lroData.azureAsyncOperation || lroData.operationLocation; return { isTerminal: () => { @@ -59,17 +52,12 @@ export function createAzureAsyncOperationStrategy( const initialOperationResult = initialOperation.result._response[LROSYM]; const currentOperationResult = currentOperation.result._response[LROSYM]; - if ( - !shouldPerformFinalGet(initialOperationResult, 
currentOperationResult) - ) { + if (!shouldPerformFinalGet(initialOperationResult, currentOperationResult)) { return currentOperation; } if (initialOperationResult?.requestMethod === "PUT") { - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); return currentOperation; } @@ -77,29 +65,20 @@ export function createAzureAsyncOperationStrategy( if (initialOperationResult?.location) { switch (finalStateVia) { case "original-uri": - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); return currentOperation; case "azure-async-operation": return currentOperation; case "location": default: - const location = - initialOperationResult.location || - currentOperationResult?.location; + const location = initialOperationResult.location || currentOperationResult?.location; if (!location) { throw new Error("Couldn't determine final GET URL from location"); } - return await sendFinalGet( - initialOperation, - sendOperationFn, - location - ); + return await sendFinalGet(initialOperation, sendOperationFn, location); } } @@ -177,10 +156,7 @@ function getCompositeMappers(responses: { }, {} as { [responseCode: string]: OperationResponse }); } -function shouldPerformFinalGet( - initialResult?: LROResponseInfo, - currentResult?: LROResponseInfo -) { +function shouldPerformFinalGet(initialResult?: LROResponseInfo, currentResult?: LROResponseInfo) { const { status } = currentResult || {}; const { requestMethod: initialRequestMethod, location } = initialResult || {}; if (status && status.toLowerCase() !== "succeeded") { diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts index 62ed188e691e..49333c25b430 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts +++ 
b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -44,10 +44,7 @@ export function createBodyPollingStrategy( }; // Execute the polling operation - initialOperation.result = await sendOperation( - initialOperation.args, - pollingSpec - ); + initialOperation.result = await sendOperation(initialOperation.args, pollingSpec); return initialOperation; } }; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts index cfcfa8efd0a7..008f60177503 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -8,9 +8,7 @@ export function createLocationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; @@ -51,8 +49,7 @@ export function createLocationStrategy( const result = await sendOperationFn(pollingArgs, pollingSpec); // Update latest polling url - lastKnownPollingUrl = - result._response[LROSYM]?.location || lastKnownPollingUrl; + lastKnownPollingUrl = result._response[LROSYM]?.location || lastKnownPollingUrl; // Update lastOperation result currentOperation = { diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index e686401a5cf9..1e1ec61db3b3 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -21,12 +21,8 @@ class LROPolicy extends BaseRequestPolicy { super(nextPolicy, options); } - public async sendRequest( - webResource: WebResource - ): Promise { - let result: LROOperationResponse = await this._nextPolicy.sendRequest( - webResource - ); + public async sendRequest(webResource: WebResource): Promise { 
+ let result: LROOperationResponse = await this._nextPolicy.sendRequest(webResource); const _lroData = getLROData(result); result[LROSYM] = _lroData; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts index 72d979bd46a9..9ab5f25fb49a 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -1,17 +1,6 @@ import { Poller } from "@azure/core-lro"; -import { - OperationSpec, - OperationArguments, - delay, - RestError -} from "@azure/core-http"; -import { - BaseResult, - LROOperationState, - LROOperationStep, - FinalStateVia, - LROSYM -} from "./models"; +import { OperationSpec, OperationArguments, delay, RestError } from "@azure/core-http"; +import { BaseResult, LROOperationState, LROOperationStep, FinalStateVia, LROSYM } from "./models"; import { makeOperation } from "./operation"; import { createBodyPollingStrategy } from "./bodyPollingStrategy"; import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; @@ -70,11 +59,7 @@ export class LROPoller extends Poller< result: initialOperationResult }; - const pollingStrategy = getPollingStrategy( - initialOperation, - sendOperation, - finalStateVia - ); + const pollingStrategy = getPollingStrategy(initialOperation, sendOperation, finalStateVia); const state: LROOperationState = { // Initial operation will become the last operation @@ -127,11 +112,7 @@ function getPollingStrategy( } if (lroData.azureAsyncOperation || lroData.operationLocation) { - return createAzureAsyncOperationStrategy( - initialOperation, - sendOperationFn, - finalStateVia - ); + return createAzureAsyncOperationStrategy(initialOperation, sendOperationFn, finalStateVia); } if (lroData.location) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts index ed0dd9132876..704c731cf370 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/models.ts +++ 
b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -8,10 +8,7 @@ import { import { PollOperationState, PollOperation } from "@azure/core-lro"; export const LROSYM = Symbol("LROData"); -export type FinalStateVia = - | "azure-async-operation" - | "location" - | "original-uri"; +export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; export interface LROResponseInfo { requestMethod: HttpMethods; @@ -47,8 +44,7 @@ export interface LROOperationStep { result: TResult; } -export interface LROOperationState - extends PollOperationState { +export interface LROOperationState extends PollOperationState { lastOperation: LROOperationStep; initialOperation: LROOperationStep; pollingStrategy: LROStrategy; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts index 9b37277e7b2a..e1e3fa2e1126 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/operation.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -41,9 +41,7 @@ async function update( const currentLroData = currentResponse.result._response[LROSYM]; if (!currentLroData) { - throw new Error( - "Expected lroData to be defined for updating LRO operation" - ); + throw new Error("Expected lroData to be defined for updating LRO operation"); } if (state.result) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts index e3289b95905b..3c518804edec 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -21,17 +21,10 @@ export function shouldDeserializeLRO(finalStateVia?: string) { isInitialRequest = false; } - if ( - initialOperationInfo.azureAsyncOperation || - initialOperationInfo.operationLocation - ) { + if (initialOperationInfo.azureAsyncOperation || initialOperationInfo.operationLocation) { return ( !isInitialRequest && - isAsyncOperationFinalResponse( - response, - 
initialOperationInfo, - finalStateVia - ) + isAsyncOperationFinalResponse(response, initialOperationInfo, finalStateVia) ); } @@ -69,10 +62,7 @@ function isAsyncOperationFinalResponse( return true; } - if ( - initialOperationInfo.requestMethod !== "PUT" && - !initialOperationInfo.location - ) { + if (initialOperationInfo.requestMethod !== "PUT" && !initialOperationInfo.location) { return true; } diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index 4bcfe16ee83c..274f837efafd 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -199,9 +199,7 @@ export type TriggerUnion = | TumblingWindowTrigger | ChainingTrigger; export type DataFlowUnion = MappingDataFlow; -export type IntegrationRuntimeUnion = - | ManagedIntegrationRuntime - | SelfHostedIntegrationRuntime; +export type IntegrationRuntimeUnion = ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; export type SecretBaseUnion = SecureString | AzureKeyVaultSecretReference; export type DatasetLocationUnion = | AzureBlobStorageLocation @@ -352,10 +350,7 @@ export type ExecutionActivityUnion = | ExecuteDataFlowActivity | SynapseNotebookActivity | SynapseSparkJobDefinitionActivity; -export type MultiplePipelineTriggerUnion = - | ScheduleTrigger - | BlobTrigger - | BlobEventsTrigger; +export type MultiplePipelineTriggerUnion = ScheduleTrigger | BlobTrigger | BlobEventsTrigger; export type TabularSourceUnion = | AzureTableSource | InformixSource @@ -3134,12 +3129,7 @@ export interface DatasetStorageFormat { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "TextFormat" - | "JsonFormat" - | "AvroFormat" - | "OrcFormat" - | "ParquetFormat"; + type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. 
*/ @@ -3329,10 +3319,7 @@ export interface FormatWriteSettings { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "AvroWriteSettings" - | "DelimitedTextWriteSettings" - | "JsonWriteSettings"; + type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -14607,8 +14594,7 @@ export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceReso /** * Optional parameters. */ -export interface LinkedServiceGetLinkedServiceOptionalParams - extends coreHttp.OperationOptions { +export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { /** * ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14678,8 +14664,7 @@ export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { /** * Optional parameters. */ -export interface DatasetCreateOrUpdateDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -14713,8 +14698,7 @@ export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { /** * Optional parameters. */ -export interface DatasetGetDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
*/ @@ -14784,8 +14768,7 @@ export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { /** * Optional parameters. */ -export interface PipelineCreateOrUpdatePipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -14819,8 +14802,7 @@ export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineGetPipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14850,8 +14832,7 @@ export type PipelineGetPipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineCreatePipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { /** * Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */ @@ -14973,8 +14954,7 @@ export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { /** * Optional parameters. */ -export interface PipelineRunCancelPipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { /** * If true, cancel all the Child pipelines that are triggered by the current pipeline. */ @@ -15004,8 +14984,7 @@ export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { /** * Optional parameters. 
*/ -export interface TriggerCreateOrUpdateTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15039,8 +15018,7 @@ export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { /** * Optional parameters. */ -export interface TriggerGetTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15178,8 +15156,7 @@ export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResp /** * Optional parameters. */ -export interface DataFlowCreateOrUpdateDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15213,8 +15190,7 @@ export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { /** * Optional parameters. */ -export interface DataFlowGetDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15412,8 +15388,7 @@ export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & /** * Optional parameters. 
*/ -export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { /** * ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15443,8 +15418,7 @@ export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { /** * Optional parameters. */ -export interface SqlScriptGetSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { /** * ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15684,8 +15658,7 @@ export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse /** * Optional parameters. */ -export interface NotebookCreateOrUpdateNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { /** * ETag of the Note book entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15719,8 +15692,7 @@ export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { /** * Optional parameters. */ -export interface NotebookGetNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { /** * ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15961,8 +15933,7 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces /** * Optional parameters. 
*/ -export interface ArtifactsClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index 424331700c33..0b924baee621 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -14753,8 +14753,7 @@ export const TextFormat: coreHttp.CompositeMapper = { className: "TextFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, columnDelimiter: { @@ -14822,8 +14821,7 @@ export const JsonFormat: coreHttp.CompositeMapper = { className: "JsonFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, filePattern: { @@ -14867,8 +14865,7 @@ export const AvroFormat: coreHttp.CompositeMapper = { className: "AvroFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14882,8 +14879,7 @@ export const OrcFormat: coreHttp.CompositeMapper = { className: "OrcFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - 
DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14897,8 +14893,7 @@ export const ParquetFormat: coreHttp.CompositeMapper = { className: "ParquetFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14985,8 +14980,7 @@ export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebAnonymousAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties } @@ -14999,8 +14993,7 @@ export const WebBasicAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebBasicAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, username: { @@ -15027,8 +15020,7 @@ export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebClientCertificateAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { 
...WebLinkedServiceTypeProperties.type.modelProperties, pfx: { @@ -17642,9 +17634,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap ...DependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -17656,9 +17646,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap }, size: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -17677,8 +17665,7 @@ export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeKeyAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, key: { @@ -17698,8 +17685,7 @@ export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeRbacAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, resourceId: { @@ -21054,9 +21040,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper ...TriggerDependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new 
RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -21067,9 +21051,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper }, size: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index 977767e2578f..3eab86fcd719 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -26,10 +26,9 @@ export class BigDataPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + BigDataPoolsListResponse + >; } /** @@ -45,10 +44,9 @@ export class BigDataPools { bigDataPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + BigDataPoolsGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index 3dffca96470b..010e6f89c007 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -57,10 +57,7 @@ export class DataFlow { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await 
this._getDataFlowsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDataFlowsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -90,10 +87,7 @@ export class DataFlow { dataFlow, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowCreateOrUpdateDataFlowResponse >; @@ -143,13 +137,8 @@ export class DataFlow { dataFlowName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -180,13 +169,8 @@ export class DataFlow { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -276,11 +260,7 @@ const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataFlow, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - 
Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index f0e17e80dafd..f84fba997bfa 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -59,10 +59,7 @@ export class DataFlowDebugSession { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._queryDataFlowDebugSessionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._queryDataFlowDebugSessionsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -71,9 +68,7 @@ export class DataFlowDebugSession { private async *queryDataFlowDebugSessionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage( - options - )) { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage(options)) { yield* page; } } @@ -86,17 +81,12 @@ export class DataFlowDebugSession { async createDataFlowDebugSession( request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions - ): Promise< - LROPoller - > { + ): Promise> { const operationArguments: coreHttp.OperationArguments = { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowDebugSessionCreateDataFlowDebugSessionResponse >; @@ -120,18 +110,14 @@ export class 
DataFlowDebugSession { */ private _queryDataFlowDebugSessionsByWorkspace( options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse - > { + ): Promise { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest( operationArguments, queryDataFlowDebugSessionsByWorkspaceOperationSpec - ) as Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse - >; + ) as Promise; } /** @@ -185,10 +171,7 @@ export class DataFlowDebugSession { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowDebugSessionExecuteCommandResponse >; @@ -215,9 +198,7 @@ export class DataFlowDebugSession { private _queryDataFlowDebugSessionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse - > { + ): Promise { const operationArguments: coreHttp.OperationArguments = { nextLink, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -225,9 +206,7 @@ export class DataFlowDebugSession { return this.client.sendOperationRequest( operationArguments, queryDataFlowDebugSessionsByWorkspaceNextOperationSpec - ) as Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse - >; + ) as Promise; } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 0cef4925e7c0..294e0e457bd6 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ 
b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -57,10 +57,7 @@ export class Dataset { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDatasetsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDatasetsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -106,10 +103,7 @@ export class Dataset { dataset, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DatasetCreateOrUpdateDatasetResponse >; @@ -140,10 +134,9 @@ export class Dataset { datasetName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getDatasetOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getDatasetOperationSpec) as Promise< + DatasetGetDatasetResponse + >; } /** @@ -159,13 +152,8 @@ export class Dataset { datasetName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -196,13 +184,8 @@ export class Dataset { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return 
this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -291,11 +274,7 @@ const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataset, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.datasetName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index 4736ccb3746a..9a8962a761fc 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -2,10 +2,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { - IntegrationRuntimesListResponse, - IntegrationRuntimesGetResponse -} from "../models"; +import { IntegrationRuntimesListResponse, IntegrationRuntimesGetResponse } from "../models"; /** * Class representing a IntegrationRuntimes. @@ -25,16 +22,13 @@ export class IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. 
*/ - list( - options?: coreHttp.OperationOptions - ): Promise { + list(options?: coreHttp.OperationOptions): Promise { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + IntegrationRuntimesListResponse + >; } /** @@ -50,10 +44,9 @@ export class IntegrationRuntimes { integrationRuntimeName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + IntegrationRuntimesGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index 35436c92ba04..644f8913310e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -57,10 +57,7 @@ export class LinkedService { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getLinkedServicesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getLinkedServicesByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -69,9 +66,7 @@ export class LinkedService { private async *getLinkedServicesByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getLinkedServicesByWorkspacePagingPage( - options - )) { + for await (const page of this.getLinkedServicesByWorkspacePagingPage(options)) { yield* page; } } @@ -108,10 +103,7 @@ export class 
LinkedService { linkedService, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< LinkedServiceCreateOrUpdateLinkedServiceResponse >; @@ -161,13 +153,8 @@ export class LinkedService { linkedServiceName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -198,13 +185,8 @@ export class LinkedService { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -294,11 +276,7 @@ const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.linkedService, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git 
a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index df003d827a91..6bc39f71c116 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -59,10 +59,7 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebooksByWorkspaceNext( - continuationToken, - options - ); + result = await this._getNotebooksByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -104,10 +101,7 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebookSummaryByWorkSpaceNext( - continuationToken, - options - ); + result = await this._getNotebookSummaryByWorkSpaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -116,9 +110,7 @@ export class Notebook { private async *getNotebookSummaryByWorkSpacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( - options - )) { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage(options)) { yield* page; } } @@ -171,10 +163,7 @@ export class Notebook { notebook, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< NotebookCreateOrUpdateNotebookResponse >; @@ -224,13 +213,8 @@ export class Notebook { notebookName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: 
coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -261,13 +245,8 @@ export class Notebook { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -393,11 +372,7 @@ const createOrUpdateNotebookOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.notebook, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.notebookName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index 87c49bd6c9ae..b481a9d165eb 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -59,10 +59,7 @@ export class Pipeline { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getPipelinesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getPipelinesByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -108,10 +105,7 @@ 
export class Pipeline { pipeline, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< PipelineCreateOrUpdatePipelineResponse >; @@ -161,13 +155,8 @@ export class Pipeline { pipelineName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -198,13 +187,8 @@ export class Pipeline { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -313,11 +297,7 @@ const createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.pipeline, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.pipelineName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts 
b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index 9c2c3370e5cc..240337eb9067 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -157,11 +157,7 @@ const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.filterParameters, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.pipelineName, - Parameters.runId - ], + urlParameters: [Parameters.endpoint, Parameters.pipelineName, Parameters.runId], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index 0604bdeeaf1b..b7510b8e133f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -59,10 +59,7 @@ export class SparkJobDefinition { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSparkJobDefinitionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -71,9 +68,7 @@ export class SparkJobDefinition { private async *getSparkJobDefinitionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( - options - )) { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage(options)) { yield* page; } } @@ -167,10 +162,7 @@ export class SparkJobDefinition { sparkJobDefinitionName, options: this.getOperationOptions(options, "location") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: 
coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionExecuteSparkJobDefinitionResponse >; @@ -205,13 +197,8 @@ export class SparkJobDefinition { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -239,10 +226,7 @@ export class SparkJobDefinition { sparkJobDefinitionAzureResource, options: this.getOperationOptions(options, "location") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionDebugSparkJobDefinitionResponse >; @@ -278,9 +262,7 @@ export class SparkJobDefinition { return this.client.sendOperationRequest( operationArguments, getSparkJobDefinitionsByWorkspaceNextOperationSpec - ) as Promise< - SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse - >; + ) as Promise; } private getOperationOptions( @@ -329,11 +311,7 @@ const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sparkJobDefinition, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: 
"json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index dc58292e2873..375608469931 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -26,10 +26,9 @@ export class SqlPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + SqlPoolsListResponse + >; } /** @@ -37,18 +36,14 @@ export class SqlPools { * @param sqlPoolName The Sql Pool name * @param options The options parameters. */ - get( - sqlPoolName: string, - options?: coreHttp.OperationOptions - ): Promise { + get(sqlPoolName: string, options?: coreHttp.OperationOptions): Promise { const operationArguments: coreHttp.OperationArguments = { sqlPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + SqlPoolsGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index d3e08c1f3ea1..8db16a32af20 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -57,10 +57,7 @@ export class SqlScript { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSqlScriptsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getSqlScriptsByWorkspaceNext(continuationToken, 
options); continuationToken = result.nextLink; yield result.value || []; } @@ -166,13 +163,8 @@ export class SqlScript { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -253,11 +245,7 @@ const createOrUpdateSqlScriptOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sqlScript, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index 4f333b059259..c9ae4ffdeb84 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -59,10 +59,7 @@ export class Trigger { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getTriggersByWorkspaceNext( - continuationToken, - options - ); + result = await this._getTriggersByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -108,10 +105,7 @@ export class Trigger { trigger, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: 
coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerCreateOrUpdateTriggerResponse >; @@ -142,10 +136,9 @@ export class Trigger { triggerName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getTriggerOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getTriggerOperationSpec) as Promise< + TriggerGetTriggerResponse + >; } /** @@ -161,13 +154,8 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -195,10 +183,7 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerSubscribeTriggerToEventsResponse >; @@ -248,10 +233,7 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerUnsubscribeTriggerFromEventsResponse >; @@ -282,13 +264,8 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; 
- const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -316,13 +293,8 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -411,11 +383,7 @@ const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.trigger, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.triggerName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index 20e37cfac269..f64442937aa7 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -2,10 +2,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { - RunFilterParameters, - TriggerRunQueryTriggerRunsByWorkspaceResponse -} from 
"../models"; +import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from "../models"; /** * Class representing a TriggerRun. @@ -98,11 +95,7 @@ const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; @@ -116,11 +109,7 @@ const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index 4c4453399fb4..9fc379060c1e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -26,10 +26,9 @@ export class Workspace { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + WorkspaceGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 058888c80532..550bd2db8014 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -56,11 +56,7 @@ const 
getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.gitHubAccessTokenRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.clientRequestId - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.clientRequestId], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts index c8a5ef476bf7..cae6cb3ab10e 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts @@ -170,8 +170,7 @@ export type ManagedPrivateEndpointsListNextResponse = ManagedPrivateEndpointList /** * Optional parameters. */ -export interface ManagedPrivateEndpointsClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface ManagedPrivateEndpointsClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts index d354c10f3bf2..123c22c9a279 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts @@ -56,11 +56,7 @@ export class ManagedPrivateEndpoints { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._listNext( - managedVirtualNetworkName, - continuationToken, - options - ); + result = await this._listNext(managedVirtualNetworkName, continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -70,10 +66,7 @@ export class ManagedPrivateEndpoints { 
managedVirtualNetworkName: string, options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.listPagingPage( - managedVirtualNetworkName, - options - )) { + for await (const page of this.listPagingPage(managedVirtualNetworkName, options)) { yield* page; } } @@ -94,10 +87,9 @@ export class ManagedPrivateEndpoints { managedPrivateEndpointName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + ManagedPrivateEndpointsGetResponse + >; } /** @@ -119,10 +111,9 @@ export class ManagedPrivateEndpoints { managedPrivateEndpoint, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - createOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, createOperationSpec) as Promise< + ManagedPrivateEndpointsCreateResponse + >; } /** @@ -141,10 +132,9 @@ export class ManagedPrivateEndpoints { managedPrivateEndpointName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - deleteOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec) as Promise< + coreHttp.RestResponse + >; } /** @@ -160,10 +150,9 @@ export class ManagedPrivateEndpoints { managedVirtualNetworkName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + ManagedPrivateEndpointsListResponse + >; } /** @@ -182,10 +171,9 @@ export class ManagedPrivateEndpoints { nextLink, options: 
coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listNextOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listNextOperationSpec) as Promise< + ManagedPrivateEndpointsListNextResponse + >; } } // Operation Specifications @@ -244,8 +232,7 @@ const deleteOperationSpec: coreHttp.OperationSpec = { serializer }; const listOperationSpec: coreHttp.OperationSpec = { - path: - "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", + path: "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", httpMethod: "GET", responses: { 200: { @@ -266,11 +253,7 @@ const listNextOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.managedVirtualNetworkName, - Parameters.nextLink - ], + urlParameters: [Parameters.endpoint, Parameters.managedVirtualNetworkName, Parameters.nextLink], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-monitoring/src/models/index.ts b/sdk/synapse/synapse-monitoring/src/models/index.ts index 4d28685ede7a..9d8c26fa69ed 100644 --- a/sdk/synapse/synapse-monitoring/src/models/index.ts +++ b/sdk/synapse/synapse-monitoring/src/models/index.ts @@ -30,8 +30,7 @@ export interface SqlQueryStringDataModel { /** * Optional parameters. */ -export interface MonitoringGetSparkJobListOptionalParams - extends coreHttp.OperationOptions { +export interface MonitoringGetSparkJobListOptionalParams extends coreHttp.OperationOptions { /** * Can provide a guid, which is helpful for debugging and to provide better customer support */ @@ -61,8 +60,7 @@ export type MonitoringGetSparkJobListResponse = SparkJobListViewResponse & { /** * Optional parameters. 
*/ -export interface MonitoringGetSqlJobQueryStringOptionalParams - extends coreHttp.OperationOptions { +export interface MonitoringGetSqlJobQueryStringOptionalParams extends coreHttp.OperationOptions { /** * Can provide a guid, which is helpful for debugging and to provide better customer support */ @@ -95,8 +93,7 @@ export type MonitoringGetSqlJobQueryStringResponse = SqlQueryStringDataModel & { /** * Optional parameters. */ -export interface MonitoringClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface MonitoringClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts index 53f2223463d5..9c82f427c63b 100644 --- a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts +++ b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts @@ -80,12 +80,7 @@ const getSqlJobQueryStringOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.SqlQueryStringDataModel } }, - queryParameters: [ - Parameters.apiVersion, - Parameters.filter, - Parameters.orderby, - Parameters.skip - ], + queryParameters: [Parameters.apiVersion, Parameters.filter, Parameters.orderby, Parameters.skip], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.xMsClientRequestId], serializer diff --git a/sdk/synapse/synapse-spark/src/models/index.ts b/sdk/synapse/synapse-spark/src/models/index.ts index c388e24dc015..8f7ea475c8cf 100644 --- a/sdk/synapse/synapse-spark/src/models/index.ts +++ b/sdk/synapse/synapse-spark/src/models/index.ts @@ -465,8 +465,7 @@ export type SparkStatementLanguageType = string; /** * Optional parameters. 
*/ -export interface SparkBatchGetSparkBatchJobsOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -505,8 +504,7 @@ export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { /** * Optional parameters. */ -export interface SparkBatchCreateSparkBatchJobOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -536,8 +534,7 @@ export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. */ -export interface SparkBatchGetSparkBatchJobOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -567,8 +564,7 @@ export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionsOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -607,8 +603,7 @@ export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { /** * Optional parameters. */ -export interface SparkSessionCreateSparkSessionOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. 
*/ @@ -638,8 +633,7 @@ export type SparkSessionCreateSparkSessionResponse = SparkSession & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -749,8 +743,7 @@ export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellatio /** * Optional parameters. */ -export interface SparkClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Valid api-version for the request. */ diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts index d9d36b0a8b52..0a8b12b48eae 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts @@ -112,11 +112,7 @@ const getSparkBatchJobsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept], serializer }; @@ -130,11 +126,7 @@ const createSparkBatchJobOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.sparkBatchJobOptions, queryParameters: [Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git 
a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts index 844d4d4331ac..60decb5b9384 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts @@ -221,11 +221,7 @@ const getSparkSessionsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept], serializer }; @@ -239,11 +235,7 @@ const createSparkSessionOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.sparkSessionOptions, queryParameters: [Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/sparkClientContext.ts b/sdk/synapse/synapse-spark/src/sparkClientContext.ts index 52587e47d3ef..0c01469a88a2 100644 --- a/sdk/synapse/synapse-spark/src/sparkClientContext.ts +++ b/sdk/synapse/synapse-spark/src/sparkClientContext.ts @@ -48,8 +48,7 @@ export class SparkClientContext extends coreHttp.ServiceClient { this.requestContentType = "application/json; charset=utf-8"; this.baseUri = - options.endpoint || - "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; + options.endpoint || "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; // Parameter assignments this.endpoint = endpoint; From 7c4ff01129d9be0922f3bdea3fcdbab2e7322f88 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 2 Dec 2020 10:10:05 +1300 Subject: [PATCH 14/28] 
Add rolled up type files --- sdk/synapse/ci.yml | 38 +++++++++++++++++++ .../synapse-accesscontrol/package.json | 1 + sdk/synapse/synapse-artifacts/package.json | 1 + .../review/synapse-artifacts.api.md | 2 +- .../package.json | 1 + sdk/synapse/synapse-monitoring/package.json | 1 + sdk/synapse/synapse-spark/package.json | 1 + 7 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 sdk/synapse/ci.yml diff --git a/sdk/synapse/ci.yml b/sdk/synapse/ci.yml new file mode 100644 index 000000000000..6fa438cbd56b --- /dev/null +++ b/sdk/synapse/ci.yml @@ -0,0 +1,38 @@ +# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file. + +trigger: + branches: + include: + - master + - release/* + - hotfix/* + paths: + include: + - sdk/synapse/ + +pr: + branches: + include: + - master + - feature/* + - release/* + - hotfix/* + paths: + include: + - sdk/synapse/ + +extends: + template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml + parameters: + ServiceDirectory: synapse + Artifacts: + - name: azure-synapse-accesscontrol + safeName: azuresynapseaccesscontrol + - name: azure-synapse-artifacts + safeName: azuresynapseartifacts + - name: azure-synapse-managed-private-endpoints + safeName: azuresynapsemanagedprivateendpoints + - name: azure-synapse-monitoring + safeName: azuresynapsemonitoring + - name: azure-synapse-spark + safeName: azuresynapsespark diff --git a/sdk/synapse/synapse-accesscontrol/package.json b/sdk/synapse/synapse-accesscontrol/package.json index f97c4061e92b..f9ebb9204608 100644 --- a/sdk/synapse/synapse-accesscontrol/package.json +++ b/sdk/synapse/synapse-accesscontrol/package.json @@ -45,6 +45,7 @@ "esm/**/*.d.ts", "esm/**/*.d.ts.map", "src/**/*.ts", + "types/synapse-accesscontrol.d.ts", "README.md", "rollup.config.js", "tsconfig.json" diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 806ba699d86b..a0cef3cb4772 100644 --- 
a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -46,6 +46,7 @@ "esm/**/*.d.ts", "esm/**/*.d.ts.map", "src/**/*.ts", + "types/synapse-artifacts.d.ts", "README.md", "rollup.config.js", "tsconfig.json" diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md index 65a431327363..7354a2fca252 100644 --- a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md +++ b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md @@ -6612,7 +6612,7 @@ export type ZohoSource = TabularSource & { // Warnings were encountered during analysis: // -// src/models/index.ts:15209:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts +// src/models/index.ts:15186:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts // (No @packageDocumentation comment for this package) diff --git a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json index 0f51ad97609c..a8669a15fec7 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -45,6 +45,7 @@ "esm/**/*.d.ts", "esm/**/*.d.ts.map", "src/**/*.ts", + "types/synapse-managed-private-endpoints.d.ts", "README.md", "rollup.config.js", "tsconfig.json" diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index afd51d04f80a..fa201990d0b1 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -43,6 +43,7 @@ "esm/**/*.js.map", "esm/**/*.d.ts", "esm/**/*.d.ts.map", + "types/synapse-monitoring.d.ts", "src/**/*.ts", "README.md", "rollup.config.js", diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index 
0b0953081a09..18c3c135c0d2 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -43,6 +43,7 @@ "esm/**/*.js.map", "esm/**/*.d.ts", "esm/**/*.d.ts.map", + "types/synapse-spark.d.ts", "src/**/*.ts", "README.md", "rollup.config.js", From 908aa2156e6b2be86983c2fe04312ab70fe3080c Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 8 Dec 2020 06:28:59 +1300 Subject: [PATCH 15/28] WIP --- .../review/synapse-accesscontrol.api.md | 163 - .../src/accessControlClient.ts | 6 +- .../src/accessControlClientContext.ts | 4 + .../synapse-accesscontrol/src/models/index.ts | 3 +- .../review/synapse-artifacts.api.md | 6619 ----------------- .../src/artifactsClientContext.ts | 42 +- .../src/lro/azureAsyncOperationStrategy.ts | 42 +- .../src/lro/bodyPollingStrategy.ts | 5 +- .../src/lro/locationStrategy.ts | 7 +- .../synapse-artifacts/src/lro/lroPolicy.ts | 8 +- .../synapse-artifacts/src/lro/lroPoller.ts | 27 +- .../synapse-artifacts/src/lro/models.ts | 8 +- .../synapse-artifacts/src/lro/operation.ts | 4 +- .../synapse-artifacts/src/lro/requestUtils.ts | 16 +- .../synapse-artifacts/src/models/index.ts | 69 +- .../synapse-artifacts/src/models/mappers.ts | 46 +- .../src/operations/bigDataPools.ts | 14 +- .../src/operations/dataFlow.ts | 34 +- .../src/operations/dataFlowDebugSession.ts | 39 +- .../src/operations/dataset.ts | 41 +- .../src/operations/integrationRuntimes.ts | 23 +- .../src/operations/linkedService.ts | 38 +- .../src/operations/notebook.ts | 43 +- .../src/operations/pipeline.ts | 34 +- .../src/operations/pipelineRun.ts | 6 +- .../src/operations/sparkJobDefinition.ts | 38 +- .../src/operations/sqlPools.ts | 19 +- .../src/operations/sqlScript.ts | 20 +- .../src/operations/trigger.ts | 60 +- .../src/operations/triggerRun.ts | 17 +- .../src/operations/workspace.ts | 7 +- .../operations/workspaceGitRepoManagement.ts | 6 +- .../synapse-spark/review/synapse-spark.api.md | 582 -- sdk/synapse/synapse-spark/src/models/index.ts | 21 
+- .../src/operations/sparkBatch.ts | 12 +- .../src/operations/sparkSession.ts | 12 +- .../synapse-spark/src/sparkClientContext.ts | 7 +- 37 files changed, 592 insertions(+), 7550 deletions(-) delete mode 100644 sdk/synapse/synapse-accesscontrol/review/synapse-accesscontrol.api.md delete mode 100644 sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md delete mode 100644 sdk/synapse/synapse-spark/review/synapse-spark.api.md diff --git a/sdk/synapse/synapse-accesscontrol/review/synapse-accesscontrol.api.md b/sdk/synapse/synapse-accesscontrol/review/synapse-accesscontrol.api.md deleted file mode 100644 index 47289f4c84e5..000000000000 --- a/sdk/synapse/synapse-accesscontrol/review/synapse-accesscontrol.api.md +++ /dev/null @@ -1,163 +0,0 @@ -## API Report File for "@azure/synapse-accesscontrol" - -> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). - -```ts - -import * as coreHttp from '@azure/core-http'; -import { PagedAsyncIterableIterator } from '@azure/core-paging'; - -// @public (undocumented) -export class AccessControlClient extends AccessControlClientContext { - constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: AccessControlClientOptionalParams); - createRoleAssignment(createRoleAssignmentOptions: RoleAssignmentOptions, options?: coreHttp.OperationOptions): Promise; - deleteRoleAssignmentById(roleAssignmentId: string, options?: coreHttp.OperationOptions): Promise; - getCallerRoleAssignments(options?: coreHttp.OperationOptions): Promise; - getRoleAssignmentById(roleAssignmentId: string, options?: coreHttp.OperationOptions): Promise; - getRoleAssignments(options?: AccessControlClientGetRoleAssignmentsOptionalParams): Promise; - getRoleDefinitionById(roleId: string, options?: coreHttp.OperationOptions): Promise; - listRoleDefinitions(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; -} - -// @public (undocumented) -export class 
AccessControlClientContext extends coreHttp.ServiceClient { - constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: AccessControlClientOptionalParams); - // (undocumented) - apiVersion: string; - // (undocumented) - endpoint: string; -} - -// @public -export type AccessControlClientCreateRoleAssignmentResponse = RoleAssignmentDetails & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: RoleAssignmentDetails; - }; -}; - -// @public -export type AccessControlClientGetCallerRoleAssignmentsResponse = { - body: string[]; - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: string[]; - }; -}; - -// @public -export type AccessControlClientGetRoleAssignmentByIdResponse = RoleAssignmentDetails & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: RoleAssignmentDetails; - }; -}; - -// @public -export interface AccessControlClientGetRoleAssignmentsHeaders { - xMsContinuation?: string; -} - -// @public -export interface AccessControlClientGetRoleAssignmentsOptionalParams extends coreHttp.OperationOptions { - continuationToken?: string; - principalId?: string; - roleId?: string; -} - -// @public -export type AccessControlClientGetRoleAssignmentsResponse = AccessControlClientGetRoleAssignmentsHeaders & RoleAssignmentDetails[] & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: RoleAssignmentDetails[]; - parsedHeaders: AccessControlClientGetRoleAssignmentsHeaders; - }; -}; - -// @public -export type AccessControlClientGetRoleDefinitionByIdResponse = SynapseRole & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SynapseRole; - }; -}; - -// @public -export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: RolesListResponse; - }; -}; - -// @public -export type 
AccessControlClientGetRoleDefinitionsResponse = RolesListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: RolesListResponse; - }; -}; - -// @public -export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions { - apiVersion?: string; - endpoint?: string; -} - -// @public -export interface ErrorContract { - error?: ErrorResponse; -} - -// @public (undocumented) -export interface ErrorDetail { - // (undocumented) - code: string; - // (undocumented) - message: string; - // (undocumented) - target?: string; -} - -// @public (undocumented) -export interface ErrorResponse { - // (undocumented) - code: string; - // (undocumented) - details?: ErrorDetail[]; - // (undocumented) - message: string; - // (undocumented) - target?: string; -} - -// @public -export interface RoleAssignmentDetails { - id?: string; - principalId?: string; - roleId?: string; -} - -// @public -export interface RoleAssignmentOptions { - principalId: string; - roleId: string; -} - -// @public -export interface RolesListResponse { - nextLink?: string; - value: SynapseRole[]; -} - -// @public -export interface SynapseRole { - id?: string; - isBuiltIn: boolean; - name?: string; -} - - -// (No @packageDocumentation comment for this package) - -``` diff --git a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts index 61aeccd43e68..0a7ff0c8a2e3 100644 --- a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts +++ b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts @@ -291,7 +291,11 @@ const getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [Parameters.apiVersion, Parameters.roleId1, Parameters.principalId], + queryParameters: [ + Parameters.apiVersion, + Parameters.roleId1, + Parameters.principalId + ], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, 
Parameters.continuationToken], serializer diff --git a/sdk/synapse/synapse-accesscontrol/src/accessControlClientContext.ts b/sdk/synapse/synapse-accesscontrol/src/accessControlClientContext.ts index 3bea87af164d..fbc5480066a2 100644 --- a/sdk/synapse/synapse-accesscontrol/src/accessControlClientContext.ts +++ b/sdk/synapse/synapse-accesscontrol/src/accessControlClientContext.ts @@ -37,6 +37,10 @@ export class AccessControlClientContext extends coreHttp.ServiceClient { options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; } + if (!options.credentialScopes) { + options.credentialScopes = ["https://microsoft.com"]; + } + super(credentials, options); this.requestContentType = "application/json; charset=utf-8"; diff --git a/sdk/synapse/synapse-accesscontrol/src/models/index.ts b/sdk/synapse/synapse-accesscontrol/src/models/index.ts index 8dc93686ed46..e651fb487f46 100644 --- a/sdk/synapse/synapse-accesscontrol/src/models/index.ts +++ b/sdk/synapse/synapse-accesscontrol/src/models/index.ts @@ -269,7 +269,8 @@ export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListRespons /** * Optional parameters. */ -export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface AccessControlClientOptionalParams + extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md deleted file mode 100644 index 7354a2fca252..000000000000 --- a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md +++ /dev/null @@ -1,6619 +0,0 @@ -## API Report File for "@azure/synapse-artifacts" - -> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). 
- -```ts - -import * as coreHttp from '@azure/core-http'; -import { HttpMethods } from '@azure/core-http'; -import { HttpOperationResponse } from '@azure/core-http'; -import { OperationArguments } from '@azure/core-http'; -import { OperationSpec } from '@azure/core-http'; -import { PagedAsyncIterableIterator } from '@azure/core-paging'; -import { Poller } from '@azure/core-lro'; -import { PollOperationState } from '@azure/core-lro'; -import { RestResponse } from '@azure/core-http'; - -// @public -export interface Activity { - [property: string]: any; - dependsOn?: ActivityDependency[]; - description?: string; - name: string; - type: "Container" | "Execution" | "Copy" | "HDInsightHive" | "HDInsightPig" | "HDInsightMapReduce" | "HDInsightStreaming" | "HDInsightSpark" | "ExecuteSSISPackage" | "Custom" | "SqlServerStoredProcedure" | "ExecutePipeline" | "Delete" | "AzureDataExplorerCommand" | "Lookup" | "WebActivity" | "GetMetadata" | "IfCondition" | "Switch" | "ForEach" | "AzureMLBatchExecution" | "AzureMLUpdateResource" | "AzureMLExecutePipeline" | "DataLakeAnalyticsU-SQL" | "Wait" | "Until" | "Validation" | "Filter" | "DatabricksNotebook" | "DatabricksSparkJar" | "DatabricksSparkPython" | "SetVariable" | "AppendVariable" | "AzureFunctionActivity" | "WebHook" | "ExecuteDataFlow" | "SynapseNotebook" | "SparkJob" | "SqlPoolStoredProcedure"; - userProperties?: UserProperty[]; -} - -// @public -export interface ActivityDependency { - [property: string]: any; - activity: string; - dependencyConditions: DependencyCondition[]; -} - -// @public -export interface ActivityPolicy { - [property: string]: any; - retry?: any; - retryIntervalInSeconds?: number; - secureInput?: boolean; - secureOutput?: boolean; - timeout?: any; -} - -// @public -export interface ActivityRun { - [property: string]: any; - readonly activityName?: string; - readonly activityRunEnd?: Date; - readonly activityRunId?: string; - readonly activityRunStart?: Date; - readonly activityType?: string; - readonly 
durationInMs?: number; - readonly error?: any; - readonly input?: any; - readonly linkedServiceName?: string; - readonly output?: any; - readonly pipelineName?: string; - readonly pipelineRunId?: string; - readonly status?: string; -} - -// @public -export interface ActivityRunsQueryResponse { - continuationToken?: string; - value: ActivityRun[]; -} - -// @public (undocumented) -export type ActivityUnion = ControlActivity | ExecutionActivityUnion | ExecutePipelineActivity | IfConditionActivity | SwitchActivity | ForEachActivity | WaitActivity | UntilActivity | ValidationActivity | FilterActivity | SetVariableActivity | AppendVariableActivity | WebHookActivity | SqlPoolStoredProcedureActivity; - -// @public -export interface AddDataFlowToDebugSessionResponse { - jobVersion?: string; -} - -// @public -export type AmazonMWSLinkedService = LinkedService & { - endpoint: any; - marketplaceID: any; - sellerID: any; - mwsAuthToken?: SecretBaseUnion; - accessKeyId: any; - secretKey?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type AmazonMWSObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type AmazonMWSSource = TabularSource & { - query?: any; -}; - -// @public -export type AmazonRedshiftLinkedService = LinkedService & { - server: any; - username?: any; - password?: SecretBaseUnion; - database: any; - port?: any; - encryptedCredential?: any; -}; - -// @public -export type AmazonRedshiftSource = TabularSource & { - query?: any; - redshiftUnloadSettings?: RedshiftUnloadSettings; -}; - -// @public -export type AmazonRedshiftTableDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export type AmazonS3LinkedService = LinkedService & { - accessKeyId?: any; - secretAccessKey?: SecretBaseUnion; - serviceUrl?: any; - encryptedCredential?: any; -}; - -// @public -export type 
AmazonS3Location = DatasetLocation & { - bucketName?: any; - version?: any; -}; - -// @public -export type AmazonS3ReadSettings = StoreReadSettings & { - recursive?: any; - wildcardFolderPath?: any; - wildcardFileName?: any; - prefix?: any; - enablePartitionDiscovery?: boolean; - modifiedDatetimeStart?: any; - modifiedDatetimeEnd?: any; -}; - -// @public -export type AppendVariableActivity = Activity & { - variableName?: string; - value?: any; -}; - -// @public -export interface ArtifactRenameRequest { - newName?: string; -} - -// @public (undocumented) -export class ArtifactsClient extends ArtifactsClientContext { - constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ArtifactsClientOptionalParams); - // Warning: (ae-forgotten-export) The symbol "BigDataPools" needs to be exported by the entry point index.d.ts - // - // (undocumented) - bigDataPools: BigDataPools; - // Warning: (ae-forgotten-export) The symbol "DataFlow" needs to be exported by the entry point index.d.ts - // - // (undocumented) - dataFlow: DataFlow_2; - // Warning: (ae-forgotten-export) The symbol "DataFlowDebugSession" needs to be exported by the entry point index.d.ts - // - // (undocumented) - dataFlowDebugSession: DataFlowDebugSession; - // Warning: (ae-forgotten-export) The symbol "Dataset" needs to be exported by the entry point index.d.ts - // - // (undocumented) - dataset: Dataset_2; - // Warning: (ae-forgotten-export) The symbol "IntegrationRuntimes" needs to be exported by the entry point index.d.ts - // - // (undocumented) - integrationRuntimes: IntegrationRuntimes; - // Warning: (ae-forgotten-export) The symbol "LinkedService" needs to be exported by the entry point index.d.ts - // - // (undocumented) - linkedService: LinkedService_2; - // Warning: (ae-forgotten-export) The symbol "Notebook" needs to be exported by the entry point index.d.ts - // - // (undocumented) - notebook: Notebook_2; - // Warning: 
(ae-forgotten-export) The symbol "Pipeline" needs to be exported by the entry point index.d.ts - // - // (undocumented) - pipeline: Pipeline; - // Warning: (ae-forgotten-export) The symbol "PipelineRun" needs to be exported by the entry point index.d.ts - // - // (undocumented) - pipelineRun: PipelineRun_2; - // Warning: (ae-forgotten-export) The symbol "SparkJobDefinition" needs to be exported by the entry point index.d.ts - // - // (undocumented) - sparkJobDefinition: SparkJobDefinition_2; - // Warning: (ae-forgotten-export) The symbol "SqlPools" needs to be exported by the entry point index.d.ts - // - // (undocumented) - sqlPools: SqlPools; - // Warning: (ae-forgotten-export) The symbol "SqlScript" needs to be exported by the entry point index.d.ts - // - // (undocumented) - sqlScript: SqlScript_2; - // Warning: (ae-forgotten-export) The symbol "Trigger" needs to be exported by the entry point index.d.ts - // - // (undocumented) - trigger: Trigger_2; - // Warning: (ae-forgotten-export) The symbol "TriggerRun" needs to be exported by the entry point index.d.ts - // - // (undocumented) - triggerRun: TriggerRun_2; - // Warning: (ae-forgotten-export) The symbol "Workspace" needs to be exported by the entry point index.d.ts - // - // (undocumented) - workspace: Workspace_2; - // Warning: (ae-forgotten-export) The symbol "WorkspaceGitRepoManagement" needs to be exported by the entry point index.d.ts - // - // (undocumented) - workspaceGitRepoManagement: WorkspaceGitRepoManagement; -} - -// @public (undocumented) -export class ArtifactsClientContext extends coreHttp.ServiceClient { - constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ArtifactsClientOptionalParams); - // (undocumented) - apiVersion: string; - // (undocumented) - endpoint: string; -} - -// @public -export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { - apiVersion?: string; - endpoint?: string; -} - -// 
@public -export interface AutoPauseProperties { - delayInMinutes?: number; - enabled?: boolean; -} - -// @public -export interface AutoScaleProperties { - enabled?: boolean; - maxNodeCount?: number; - minNodeCount?: number; -} - -// @public -export type AvroCompressionCodec = string; - -// @public -export type AvroDataset = Dataset & { - location?: DatasetLocationUnion; - avroCompressionCodec?: AvroCompressionCodec; - avroCompressionLevel?: number; -}; - -// @public -export type AvroFormat = DatasetStorageFormat & {}; - -// @public -export type AvroSink = CopySink & { - storeSettings?: StoreWriteSettingsUnion; - formatSettings?: AvroWriteSettings; -}; - -// @public -export type AvroSource = CopySource & { - storeSettings?: StoreReadSettingsUnion; -}; - -// @public -export type AvroWriteSettings = FormatWriteSettings & { - recordName?: string; - recordNamespace?: string; -}; - -// @public -export type AzureBatchLinkedService = LinkedService & { - accountName: any; - accessKey?: SecretBaseUnion; - batchUri: any; - poolName: any; - linkedServiceName: LinkedServiceReference; - encryptedCredential?: any; -}; - -// @public -export type AzureBlobFSLinkedService = LinkedService & { - url: any; - accountKey?: any; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant?: any; - encryptedCredential?: any; -}; - -// @public -export type AzureBlobFSLocation = DatasetLocation & { - fileSystem?: any; -}; - -// @public -export type AzureBlobFSReadSettings = StoreReadSettings & { - recursive?: any; - wildcardFolderPath?: any; - wildcardFileName?: any; - enablePartitionDiscovery?: boolean; - modifiedDatetimeStart?: any; - modifiedDatetimeEnd?: any; -}; - -// @public -export type AzureBlobFSSink = CopySink & { - copyBehavior?: any; -}; - -// @public -export type AzureBlobFSSource = CopySource & { - treatEmptyAsNull?: any; - skipHeaderLineCount?: any; - recursive?: any; -}; - -// @public -export type AzureBlobFSWriteSettings = StoreWriteSettings & { - 
blockSizeInMB?: any; -}; - -// @public -export type AzureBlobStorageLinkedService = LinkedService & { - connectionString?: any; - accountKey?: AzureKeyVaultSecretReference; - sasUri?: any; - sasToken?: AzureKeyVaultSecretReference; - serviceEndpoint?: string; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant?: any; - encryptedCredential?: string; -}; - -// @public -export type AzureBlobStorageLocation = DatasetLocation & { - container?: any; -}; - -// @public -export type AzureBlobStorageReadSettings = StoreReadSettings & { - recursive?: any; - wildcardFolderPath?: any; - wildcardFileName?: any; - prefix?: any; - enablePartitionDiscovery?: boolean; - modifiedDatetimeStart?: any; - modifiedDatetimeEnd?: any; -}; - -// @public -export type AzureBlobStorageWriteSettings = StoreWriteSettings & { - blockSizeInMB?: any; -}; - -// @public -export type AzureDatabricksLinkedService = LinkedService & { - domain: any; - accessToken: SecretBaseUnion; - existingClusterId?: any; - instancePoolId?: any; - newClusterVersion?: any; - newClusterNumOfWorker?: any; - newClusterNodeType?: any; - newClusterSparkConf?: { - [propertyName: string]: any; - }; - newClusterSparkEnvVars?: { - [propertyName: string]: any; - }; - newClusterCustomTags?: { - [propertyName: string]: any; - }; - newClusterDriverNodeType?: any; - newClusterInitScripts?: any; - newClusterEnableElasticDisk?: any; - encryptedCredential?: any; -}; - -// @public -export type AzureDataExplorerCommandActivity = ExecutionActivity & { - command: any; - commandTimeout?: any; -}; - -// @public -export type AzureDataExplorerLinkedService = LinkedService & { - endpoint: any; - servicePrincipalId: any; - servicePrincipalKey: SecretBaseUnion; - database: any; - tenant: any; -}; - -// @public -export type AzureDataExplorerSink = CopySink & { - ingestionMappingName?: any; - ingestionMappingAsJson?: any; - flushImmediately?: any; -}; - -// @public -export type AzureDataExplorerSource = CopySource & { - 
query: any; - noTruncation?: any; - queryTimeout?: any; -}; - -// @public -export type AzureDataExplorerTableDataset = Dataset & { - table?: any; -}; - -// @public -export type AzureDataLakeAnalyticsLinkedService = LinkedService & { - accountName: any; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant: any; - subscriptionId?: any; - resourceGroupName?: any; - dataLakeAnalyticsUri?: any; - encryptedCredential?: any; -}; - -// @public -export type AzureDataLakeStoreLinkedService = LinkedService & { - dataLakeStoreUri: any; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant?: any; - accountName?: any; - subscriptionId?: any; - resourceGroupName?: any; - encryptedCredential?: any; -}; - -// @public -export type AzureDataLakeStoreLocation = DatasetLocation & {}; - -// @public -export type AzureDataLakeStoreReadSettings = StoreReadSettings & { - recursive?: any; - wildcardFolderPath?: any; - wildcardFileName?: any; - enablePartitionDiscovery?: boolean; - modifiedDatetimeStart?: any; - modifiedDatetimeEnd?: any; -}; - -// @public -export type AzureDataLakeStoreSink = CopySink & { - copyBehavior?: any; - enableAdlsSingleFileParallel?: any; -}; - -// @public -export type AzureDataLakeStoreSource = CopySource & { - recursive?: any; -}; - -// @public -export type AzureDataLakeStoreWriteSettings = StoreWriteSettings & {}; - -// @public -export type AzureEntityResource = Resource & { - readonly etag?: string; -}; - -// @public -export type AzureFileStorageLinkedService = LinkedService & { - host: any; - userId?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type AzureFileStorageLocation = DatasetLocation & {}; - -// @public -export type AzureFileStorageReadSettings = StoreReadSettings & { - recursive?: any; - wildcardFolderPath?: any; - wildcardFileName?: any; - enablePartitionDiscovery?: boolean; - modifiedDatetimeStart?: any; - modifiedDatetimeEnd?: any; -}; - -// @public 
-export type AzureFunctionActivity = ExecutionActivity & { - method: AzureFunctionActivityMethod; - functionName: any; - headers?: any; - body?: any; -}; - -// @public -export type AzureFunctionActivityMethod = string; - -// @public -export type AzureFunctionLinkedService = LinkedService & { - functionAppUrl: any; - functionKey?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type AzureKeyVaultLinkedService = LinkedService & { - baseUrl: any; -}; - -// @public -export type AzureKeyVaultSecretReference = SecretBase & { - store: LinkedServiceReference; - secretName: any; - secretVersion?: any; -}; - -// @public -export type AzureMariaDBLinkedService = LinkedService & { - connectionString?: any; - pwd?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type AzureMariaDBSource = TabularSource & { - query?: any; -}; - -// @public -export type AzureMariaDBTableDataset = Dataset & { - tableName?: any; -}; - -// @public -export type AzureMLBatchExecutionActivity = ExecutionActivity & { - globalParameters?: { - [propertyName: string]: any; - }; - webServiceOutputs?: { - [propertyName: string]: AzureMLWebServiceFile; - }; - webServiceInputs?: { - [propertyName: string]: AzureMLWebServiceFile; - }; -}; - -// @public -export type AzureMLExecutePipelineActivity = ExecutionActivity & { - mlPipelineId: any; - experimentName?: any; - mlPipelineParameters?: any; - mlParentRunId?: any; - continueOnStepFailure?: any; -}; - -// @public -export type AzureMLLinkedService = LinkedService & { - mlEndpoint: any; - apiKey: SecretBaseUnion; - updateResourceEndpoint?: any; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant?: any; - encryptedCredential?: any; -}; - -// @public -export type AzureMLServiceLinkedService = LinkedService & { - subscriptionId: any; - resourceGroupName: any; - mlWorkspaceName: any; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant?: any; - 
encryptedCredential?: any; -}; - -// @public -export type AzureMLUpdateResourceActivity = ExecutionActivity & { - trainedModelName: any; - trainedModelLinkedServiceName: LinkedServiceReference; - trainedModelFilePath: any; -}; - -// @public -export interface AzureMLWebServiceFile { - filePath: any; - linkedServiceName: LinkedServiceReference; -} - -// @public -export type AzureMySqlLinkedService = LinkedService & { - connectionString: any; - password?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type AzureMySqlSink = CopySink & { - preCopyScript?: any; -}; - -// @public -export type AzureMySqlSource = TabularSource & { - query?: any; -}; - -// @public -export type AzureMySqlTableDataset = Dataset & { - tableName?: any; - table?: any; -}; - -// @public -export type AzurePostgreSqlLinkedService = LinkedService & { - connectionString?: any; - password?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type AzurePostgreSqlSink = CopySink & { - preCopyScript?: any; -}; - -// @public -export type AzurePostgreSqlSource = TabularSource & { - query?: any; -}; - -// @public -export type AzurePostgreSqlTableDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export type AzureQueueSink = CopySink & {}; - -// @public -export type AzureSearchIndexDataset = Dataset & { - indexName: any; -}; - -// @public -export type AzureSearchIndexSink = CopySink & { - writeBehavior?: AzureSearchIndexWriteBehaviorType; -}; - -// @public -export type AzureSearchIndexWriteBehaviorType = string; - -// @public -export type AzureSearchLinkedService = LinkedService & { - url: any; - key?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type AzureSqlDatabaseLinkedService = LinkedService & { - connectionString: any; - password?: AzureKeyVaultSecretReference; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant?: any; - 
encryptedCredential?: any; -}; - -// @public -export type AzureSqlDWLinkedService = LinkedService & { - connectionString: any; - password?: AzureKeyVaultSecretReference; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant?: any; - encryptedCredential?: any; -}; - -// @public -export type AzureSqlDWTableDataset = Dataset & { - tableName?: any; - schemaTypePropertiesSchema?: any; - table?: any; -}; - -// @public -export type AzureSqlMILinkedService = LinkedService & { - connectionString: any; - password?: AzureKeyVaultSecretReference; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant?: any; - encryptedCredential?: any; -}; - -// @public -export type AzureSqlMITableDataset = Dataset & { - tableName?: any; - schemaTypePropertiesSchema?: any; - table?: any; -}; - -// @public -export type AzureSqlSink = CopySink & { - sqlWriterStoredProcedureName?: any; - sqlWriterTableType?: any; - preCopyScript?: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; - storedProcedureTableTypeParameterName?: any; - tableOption?: any; -}; - -// @public -export type AzureSqlSource = TabularSource & { - sqlReaderQuery?: any; - sqlReaderStoredProcedureName?: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; - produceAdditionalTypes?: any; -}; - -// @public -export type AzureSqlTableDataset = Dataset & { - tableName?: any; - schemaTypePropertiesSchema?: any; - table?: any; -}; - -// @public -export type AzureStorageLinkedService = LinkedService & { - connectionString?: any; - accountKey?: AzureKeyVaultSecretReference; - sasUri?: any; - sasToken?: AzureKeyVaultSecretReference; - encryptedCredential?: string; -}; - -// @public -export type AzureTableDataset = Dataset & { - tableName: any; -}; - -// @public -export type AzureTableSink = CopySink & { - azureTableDefaultPartitionKeyValue?: any; - azureTablePartitionKeyName?: any; - azureTableRowKeyName?: any; - 
azureTableInsertType?: any; -}; - -// @public -export type AzureTableSource = TabularSource & { - azureTableSourceQuery?: any; - azureTableSourceIgnoreTableNotFound?: any; -}; - -// @public -export type AzureTableStorageLinkedService = LinkedService & { - connectionString?: any; - accountKey?: AzureKeyVaultSecretReference; - sasUri?: any; - sasToken?: AzureKeyVaultSecretReference; - encryptedCredential?: string; -}; - -// @public -export interface BigDataPoolReference { - referenceName: string; - type: BigDataPoolReferenceType; -} - -// @public -export type BigDataPoolReferenceType = string; - -// @public -export type BigDataPoolResourceInfo = TrackedResource & { - provisioningState?: string; - autoScale?: AutoScaleProperties; - creationDate?: Date; - autoPause?: AutoPauseProperties; - isComputeIsolationEnabled?: boolean; - haveLibraryRequirementsChanged?: boolean; - sessionLevelPackagesEnabled?: boolean; - sparkEventsFolder?: string; - nodeCount?: number; - libraryRequirements?: LibraryRequirements; - sparkConfigProperties?: LibraryRequirements; - sparkVersion?: string; - defaultSparkLogFolder?: string; - nodeSize?: NodeSize; - nodeSizeFamily?: NodeSizeFamily; -}; - -// @public -export interface BigDataPoolResourceInfoListResult { - nextLink?: string; - value?: BigDataPoolResourceInfo[]; -} - -// @public -export type BigDataPoolsGetResponse = BigDataPoolResourceInfo & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: BigDataPoolResourceInfo; - }; -}; - -// @public -export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: BigDataPoolResourceInfoListResult; - }; -}; - -// @public -export type BinaryDataset = Dataset & { - location?: DatasetLocationUnion; - compression?: DatasetCompressionUnion; -}; - -// @public -export type BinarySink = CopySink & { - storeSettings?: StoreWriteSettingsUnion; -}; - -// @public -export type BinarySource = 
CopySource & { - storeSettings?: StoreReadSettingsUnion; -}; - -// @public -export type BlobEventsTrigger = MultiplePipelineTrigger & { - blobPathBeginsWith?: string; - blobPathEndsWith?: string; - ignoreEmptyBlobs?: boolean; - events: BlobEventType[]; - scope: string; -}; - -// @public -export type BlobEventType = string; - -// @public -export type BlobSink = CopySink & { - blobWriterOverwriteFiles?: any; - blobWriterDateTimeFormat?: any; - blobWriterAddHeader?: any; - copyBehavior?: any; -}; - -// @public -export type BlobSource = CopySource & { - treatEmptyAsNull?: any; - skipHeaderLineCount?: any; - recursive?: any; -}; - -// @public -export type BlobTrigger = MultiplePipelineTrigger & { - folderPath: string; - maxConcurrency: number; - linkedService: LinkedServiceReference; -}; - -// @public -export type CassandraLinkedService = LinkedService & { - host: any; - authenticationType?: any; - port?: any; - username?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type CassandraSource = TabularSource & { - query?: any; - consistencyLevel?: CassandraSourceReadConsistencyLevels; -}; - -// @public -export type CassandraSourceReadConsistencyLevels = string; - -// @public -export type CassandraTableDataset = Dataset & { - tableName?: any; - keyspace?: any; -}; - -// @public -export type CellOutputType = string; - -// @public -export type ChainingTrigger = Trigger & { - pipeline: TriggerPipelineReference; - dependsOn: PipelineReference[]; - runDimension: string; -}; - -// @public -export interface CloudError { - code: string; - details?: CloudError[]; - message: string; - target?: string; -} - -// @public -export type CommonDataServiceForAppsEntityDataset = Dataset & { - entityName?: any; -}; - -// @public -export type CommonDataServiceForAppsLinkedService = LinkedService & { - deploymentType: DynamicsDeploymentType; - hostName?: any; - port?: any; - serviceUri?: any; - organizationName?: any; - authenticationType: 
DynamicsAuthenticationType; - username?: any; - password?: SecretBaseUnion; - servicePrincipalId?: any; - servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; - servicePrincipalCredential?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type CommonDataServiceForAppsSink = CopySink & { - writeBehavior: DynamicsSinkWriteBehavior; - ignoreNullValues?: any; - alternateKeyName?: any; -}; - -// @public -export type CommonDataServiceForAppsSource = CopySource & { - query?: any; -}; - -// @public -export type ConcurLinkedService = LinkedService & { - clientId: any; - username: any; - password?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type ConcurObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type ConcurSource = TabularSource & { - query?: any; -}; - -// @public -export type ControlActivity = Activity & {}; - -// @public -export type CopyActivity = ExecutionActivity & { - inputs?: DatasetReference[]; - outputs?: DatasetReference[]; - source: CopySourceUnion; - sink: CopySinkUnion; - translator?: any; - enableStaging?: any; - stagingSettings?: StagingSettings; - parallelCopies?: any; - dataIntegrationUnits?: any; - enableSkipIncompatibleRow?: any; - redirectIncompatibleRowSettings?: RedirectIncompatibleRowSettings; - preserveRules?: any[]; - preserve?: any[]; -}; - -// @public -export type CopyBehaviorType = string; - -// @public -export interface CopySink { - [property: string]: any; - maxConcurrentConnections?: any; - sinkRetryCount?: any; - sinkRetryWait?: any; - type: "DelimitedTextSink" | "JsonSink" | "OrcSink" | "AzurePostgreSqlSink" | "AzureMySqlSink" | "SapCloudForCustomerSink" | "AzureQueueSink" | "AzureTableSink" | "AvroSink" | "ParquetSink" | "BinarySink" | "BlobSink" | "FileSystemSink" | "DocumentDbCollectionSink" | "CosmosDbSqlApiSink" | "SqlSink" | "SqlServerSink" | 
"AzureSqlSink" | "SqlMISink" | "SqlDWSink" | "OracleSink" | "AzureDataLakeStoreSink" | "AzureBlobFSSink" | "AzureSearchIndexSink" | "OdbcSink" | "InformixSink" | "MicrosoftAccessSink" | "DynamicsSink" | "DynamicsCrmSink" | "CommonDataServiceForAppsSink" | "AzureDataExplorerSink" | "SalesforceSink" | "SalesforceServiceCloudSink" | "CosmosDbMongoDbApiSink"; - writeBatchSize?: any; - writeBatchTimeout?: any; -} - -// @public (undocumented) -export type CopySinkUnion = DelimitedTextSink | JsonSink | OrcSink | AzurePostgreSqlSink | AzureMySqlSink | SapCloudForCustomerSink | AzureQueueSink | AzureTableSink | AvroSink | ParquetSink | BinarySink | BlobSink | FileSystemSink | DocumentDbCollectionSink | CosmosDbSqlApiSink | SqlSink | SqlServerSink | AzureSqlSink | SqlMISink | SqlDWSink | OracleSink | AzureDataLakeStoreSink | AzureBlobFSSink | AzureSearchIndexSink | OdbcSink | InformixSink | MicrosoftAccessSink | DynamicsSink | DynamicsCrmSink | CommonDataServiceForAppsSink | AzureDataExplorerSink | SalesforceSink | SalesforceServiceCloudSink | CosmosDbMongoDbApiSink; - -// @public -export interface CopySource { - [property: string]: any; - maxConcurrentConnections?: any; - sourceRetryCount?: any; - sourceRetryWait?: any; - type: "AvroSource" | "ParquetSource" | "DelimitedTextSource" | "JsonSource" | "OrcSource" | "BinarySource" | "TabularSource" | "AzureTableSource" | "BlobSource" | "DocumentDbCollectionSource" | "CosmosDbSqlApiSource" | "DynamicsSource" | "DynamicsCrmSource" | "CommonDataServiceForAppsSource" | "RelationalSource" | "InformixSource" | "MicrosoftAccessSource" | "Db2Source" | "OdbcSource" | "MySqlSource" | "PostgreSqlSource" | "SybaseSource" | "SapBwSource" | "ODataSource" | "SalesforceSource" | "SalesforceServiceCloudSource" | "SapCloudForCustomerSource" | "SapEccSource" | "SapHanaSource" | "SapOpenHubSource" | "SapTableSource" | "RestSource" | "SqlSource" | "SqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" | "FileSystemSource" | 
"HdfsSource" | "AzureMySqlSource" | "AzureDataExplorerSource" | "OracleSource" | "TeradataSource" | "WebSource" | "CassandraSource" | "MongoDbSource" | "MongoDbV2Source" | "CosmosDbMongoDbApiSource" | "Office365Source" | "AzureDataLakeStoreSource" | "AzureBlobFSSource" | "HttpSource" | "AmazonMWSSource" | "AzurePostgreSqlSource" | "ConcurSource" | "CouchbaseSource" | "DrillSource" | "EloquaSource" | "GoogleBigQuerySource" | "GreenplumSource" | "HBaseSource" | "HiveSource" | "HubspotSource" | "ImpalaSource" | "JiraSource" | "MagentoSource" | "MariaDBSource" | "AzureMariaDBSource" | "MarketoSource" | "PaypalSource" | "PhoenixSource" | "PrestoSource" | "QuickBooksSource" | "ServiceNowSource" | "ShopifySource" | "SparkSource" | "SquareSource" | "XeroSource" | "ZohoSource" | "NetezzaSource" | "VerticaSource" | "SalesforceMarketingCloudSource" | "ResponsysSource" | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" | "AmazonRedshiftSource"; -} - -// @public (undocumented) -export type CopySourceUnion = AvroSource | ParquetSource | DelimitedTextSource | JsonSource | OrcSource | BinarySource | TabularSourceUnion | BlobSource | DocumentDbCollectionSource | CosmosDbSqlApiSource | DynamicsSource | DynamicsCrmSource | CommonDataServiceForAppsSource | RelationalSource | MicrosoftAccessSource | ODataSource | SalesforceServiceCloudSource | RestSource | FileSystemSource | HdfsSource | AzureDataExplorerSource | OracleSource | WebSource | MongoDbSource | MongoDbV2Source | CosmosDbMongoDbApiSource | Office365Source | AzureDataLakeStoreSource | AzureBlobFSSource | HttpSource; - -// @public -export interface CopyTranslator { - [property: string]: any; - type: "TabularTranslator"; -} - -// @public (undocumented) -export type CopyTranslatorUnion = TabularTranslator; - -// @public -export type CosmosDbLinkedService = LinkedService & { - connectionString?: any; - accountEndpoint?: any; - database?: any; - accountKey?: SecretBaseUnion; - encryptedCredential?: any; -}; - 
-// @public -export type CosmosDbMongoDbApiCollectionDataset = Dataset & { - collection: any; -}; - -// @public -export type CosmosDbMongoDbApiLinkedService = LinkedService & { - connectionString: any; - database: any; -}; - -// @public -export type CosmosDbMongoDbApiSink = CopySink & { - writeBehavior?: any; -}; - -// @public -export type CosmosDbMongoDbApiSource = CopySource & { - filter?: any; - cursorMethods?: MongoDbCursorMethodsProperties; - batchSize?: any; - queryTimeout?: any; -}; - -// @public -export type CosmosDbSqlApiCollectionDataset = Dataset & { - collectionName: any; -}; - -// @public -export type CosmosDbSqlApiSink = CopySink & { - writeBehavior?: any; -}; - -// @public -export type CosmosDbSqlApiSource = CopySource & { - query?: any; - pageSize?: any; - preferredRegions?: any; -}; - -// @public -export type CouchbaseLinkedService = LinkedService & { - connectionString?: any; - credString?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type CouchbaseSource = TabularSource & { - query?: any; -}; - -// @public -export type CouchbaseTableDataset = Dataset & { - tableName?: any; -}; - -// @public -export interface CreateDataFlowDebugSessionRequest { - clusterTimeout?: number; - dataBricksLinkedService?: LinkedServiceResource; - dataFlowName?: string; - existingClusterId?: string; - newClusterName?: string; - newClusterNodeType?: string; -} - -// @public -export interface CreateDataFlowDebugSessionResponse { - sessionId?: string; -} - -// @public -export interface CreateRunResponse { - runId: string; -} - -// @public -export type CustomActivity = ExecutionActivity & { - command: any; - resourceLinkedService?: LinkedServiceReference; - folderPath?: any; - referenceObjects?: CustomActivityReferenceObject; - extendedProperties?: { - [propertyName: string]: any; - }; - retentionTimeInDays?: any; -}; - -// @public -export interface CustomActivityReferenceObject { - datasets?: DatasetReference[]; - linkedServices?: 
LinkedServiceReference[]; -} - -// @public -export type CustomDataset = Dataset & { - typeProperties?: any; -}; - -// @public -export type CustomDataSourceLinkedService = LinkedService & { - typeProperties: any; -}; - -// @public -export interface CustomerManagedKeyDetails { - key?: WorkspaceKeyDetails; - readonly status?: string; -} - -// @public -export interface CustomSetupBase { - type: string; -} - -// @public -export type DatabricksNotebookActivity = ExecutionActivity & { - notebookPath: any; - baseParameters?: { - [propertyName: string]: any; - }; - libraries?: { - [propertyName: string]: any; - }[]; -}; - -// @public -export type DatabricksSparkJarActivity = ExecutionActivity & { - mainClassName: any; - parameters?: any[]; - libraries?: { - [propertyName: string]: any; - }[]; -}; - -// @public -export type DatabricksSparkPythonActivity = ExecutionActivity & { - pythonFile: any; - parameters?: any[]; - libraries?: { - [propertyName: string]: any; - }[]; -}; - -// @public -export interface DataFlow { - annotations?: any[]; - description?: string; - folder?: DataFlowFolder; - type: "MappingDataFlow"; -} - -// @public -export type DataFlowComputeType = string; - -// @public -export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { - ifMatch?: string; -} - -// @public -export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: DataFlowResource; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export interface DataFlowDebugCommandRequest { - commandName?: string; - commandPayload: any; - dataFlowName?: string; - sessionId: string; -} - -// @public -export interface DataFlowDebugCommandResponse { - data?: string; - status?: string; -} - -// @public -export interface DataFlowDebugPackage { - [property: string]: any; - dataFlow?: DataFlowDebugResource; - datasets?: DatasetDebugResource[]; - debugSettings?: 
DataFlowDebugPackageDebugSettings; - linkedServices?: LinkedServiceDebugResource[]; - sessionId?: string; - staging?: DataFlowStagingInfo; -} - -// @public -export interface DataFlowDebugPackageDebugSettings { - datasetParameters?: any; - parameters?: { - [propertyName: string]: any; - }; - sourceSettings?: DataFlowSourceSetting[]; -} - -// @public -export interface DataFlowDebugPreviewDataRequest { - dataFlowName?: string; - rowLimits?: number; - sessionId?: string; - streamName?: string; -} - -// @public -export interface DataFlowDebugQueryResponse { - runId?: string; -} - -// @public -export type DataFlowDebugResource = SubResourceDebugResource & { - properties: DataFlowUnion; -}; - -// @public -export interface DataFlowDebugResultResponse { - data?: string; - status?: string; -} - -// @public -export type DataFlowDebugSessionAddDataFlowResponse = AddDataFlowToDebugSessionResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: AddDataFlowToDebugSessionResponse; - }; -}; - -// @public -export interface DataFlowDebugSessionCreateDataFlowDebugSessionHeaders { - location?: string; -} - -// @public -export type DataFlowDebugSessionCreateDataFlowDebugSessionResponse = CreateDataFlowDebugSessionResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: CreateDataFlowDebugSessionResponse; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export interface DataFlowDebugSessionExecuteCommandHeaders { - location?: string; -} - -// @public -export type DataFlowDebugSessionExecuteCommandResponse = DataFlowDebugCommandResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: DataFlowDebugCommandResponse; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export interface DataFlowDebugSessionInfo { - [property: string]: any; - computeType?: string; - coreCount?: number; - dataFlowName?: string; - integrationRuntimeName?: string; - lastActivityTime?: string; - nodeCount?: number; - 
sessionId?: string; - startTime?: string; - timeToLiveInMinutes?: number; -} - -// @public -export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: QueryDataFlowDebugSessionsResponse; - }; -}; - -// @public -export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse = QueryDataFlowDebugSessionsResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: QueryDataFlowDebugSessionsResponse; - }; -}; - -// @public -export interface DataFlowDebugStatisticsRequest { - columns?: string[]; - dataFlowName?: string; - sessionId?: string; - streamName?: string; -} - -// @public -export interface DataFlowFolder { - name?: string; -} - -// @public -export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { - ifNoneMatch?: string; -} - -// @public -export type DataFlowGetDataFlowResponse = DataFlowResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: DataFlowResource; - }; -}; - -// @public -export type DataFlowGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: DataFlowListResponse; - }; -}; - -// @public -export type DataFlowGetDataFlowsByWorkspaceResponse = DataFlowListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: DataFlowListResponse; - }; -}; - -// @public -export interface DataFlowListResponse { - nextLink?: string; - value: DataFlowResource[]; -} - -// @public -export interface DataFlowReference { - [property: string]: any; - datasetParameters?: any; - referenceName: string; - type: DataFlowReferenceType; -} - -// @public -export type DataFlowReferenceType = string; - -// @public -export type DataFlowResource = AzureEntityResource & { - properties: DataFlowUnion; -}; - -// @public -export type 
DataFlowSink = Transformation & { - dataset?: DatasetReference; -}; - -// @public -export type DataFlowSource = Transformation & { - dataset?: DatasetReference; -}; - -// @public -export interface DataFlowSourceSetting { - [property: string]: any; - rowLimit?: number; - sourceName?: string; -} - -// @public -export interface DataFlowStagingInfo { - folderPath?: string; - linkedService?: LinkedServiceReference; -} - -// @public (undocumented) -export type DataFlowUnion = MappingDataFlow; - -// @public -export type DataLakeAnalyticsUsqlActivity = ExecutionActivity & { - scriptPath: any; - scriptLinkedService: LinkedServiceReference; - degreeOfParallelism?: any; - priority?: any; - parameters?: { - [propertyName: string]: any; - }; - runtimeVersion?: any; - compilationMode?: any; -}; - -// @public -export interface DataLakeStorageAccountDetails { - accountUrl?: string; - filesystem?: string; -} - -// @public -export interface Dataset { - [property: string]: any; - annotations?: any[]; - description?: string; - folder?: DatasetFolder; - linkedServiceName: LinkedServiceReference; - parameters?: { - [propertyName: string]: ParameterSpecification; - }; - schema?: any; - structure?: any; - type: "Avro" | "Parquet" | "DelimitedText" | "Json" | "Orc" | "Binary" | "AzureTable" | "AzureSqlTable" | "AzureSqlMITable" | "AzureSqlDWTable" | "CassandraTable" | "CustomDataset" | "CosmosDbSqlApiCollection" | "DocumentDbCollection" | "DynamicsEntity" | "DynamicsCrmEntity" | "CommonDataServiceForAppsEntity" | "Office365Table" | "MongoDbCollection" | "MongoDbV2Collection" | "CosmosDbMongoDbApiCollection" | "ODataResource" | "OracleTable" | "TeradataTable" | "AzureMySqlTable" | "AmazonRedshiftTable" | "Db2Table" | "RelationalTable" | "InformixTable" | "OdbcTable" | "MySqlTable" | "PostgreSqlTable" | "MicrosoftAccessTable" | "SalesforceObject" | "SalesforceServiceCloudObject" | "SybaseTable" | "SapBwCube" | "SapCloudForCustomerResource" | "SapEccResource" | "SapHanaTable" | 
"SapOpenHubTable" | "SqlServerTable" | "RestResource" | "SapTableResource" | "WebTable" | "AzureSearchIndex" | "AmazonMWSObject" | "AzurePostgreSqlTable" | "ConcurObject" | "CouchbaseTable" | "DrillTable" | "EloquaObject" | "GoogleBigQueryObject" | "GreenplumTable" | "HBaseObject" | "HiveObject" | "HubspotObject" | "ImpalaObject" | "JiraObject" | "MagentoObject" | "MariaDBTable" | "AzureMariaDBTable" | "MarketoObject" | "PaypalObject" | "PhoenixObject" | "PrestoObject" | "QuickBooksObject" | "ServiceNowObject" | "ShopifyObject" | "SparkObject" | "SquareObject" | "XeroObject" | "ZohoObject" | "NetezzaTable" | "VerticaTable" | "SalesforceMarketingCloudObject" | "ResponsysObject" | "DynamicsAXResource" | "OracleServiceCloudObject" | "AzureDataExplorerTable" | "GoogleAdWordsObject"; -} - -// @public -export type DatasetBZip2Compression = DatasetCompression & {}; - -// @public -export interface DatasetCompression { - [property: string]: any; - type: "BZip2" | "GZip" | "Deflate" | "ZipDeflate"; -} - -// @public -export type DatasetCompressionLevel = string; - -// @public (undocumented) -export type DatasetCompressionUnion = DatasetBZip2Compression | DatasetGZipCompression | DatasetDeflateCompression | DatasetZipDeflateCompression; - -// @public -export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { - ifMatch?: string; -} - -// @public -export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: DatasetResource; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export interface DatasetDataElement { - name?: any; - type?: any; -} - -// @public -export type DatasetDebugResource = SubResourceDebugResource & { - properties: DatasetUnion; -}; - -// @public -export type DatasetDeflateCompression = DatasetCompression & { - level?: DatasetCompressionLevel; -}; - -// @public -export interface DatasetFolder { - name?: string; -} - -// @public -export 
interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { - ifNoneMatch?: string; -} - -// @public -export type DatasetGetDatasetResponse = DatasetResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: DatasetResource; - }; -}; - -// @public -export type DatasetGetDatasetsByWorkspaceNextResponse = DatasetListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: DatasetListResponse; - }; -}; - -// @public -export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: DatasetListResponse; - }; -}; - -// @public -export type DatasetGZipCompression = DatasetCompression & { - level?: DatasetCompressionLevel; -}; - -// @public -export interface DatasetListResponse { - nextLink?: string; - value: DatasetResource[]; -} - -// @public -export interface DatasetLocation { - [property: string]: any; - fileName?: any; - folderPath?: any; - type: "AzureBlobStorageLocation" | "AzureBlobFSLocation" | "AzureDataLakeStoreLocation" | "AmazonS3Location" | "FileServerLocation" | "AzureFileStorageLocation" | "GoogleCloudStorageLocation" | "FtpServerLocation" | "SftpLocation" | "HttpServerLocation" | "HdfsLocation"; -} - -// @public (undocumented) -export type DatasetLocationUnion = AzureBlobStorageLocation | AzureBlobFSLocation | AzureDataLakeStoreLocation | AmazonS3Location | FileServerLocation | AzureFileStorageLocation | GoogleCloudStorageLocation | FtpServerLocation | SftpLocation | HttpServerLocation | HdfsLocation; - -// @public -export interface DatasetReference { - parameters?: { - [propertyName: string]: any; - }; - referenceName: string; - type: DatasetReferenceType; -} - -// @public -export type DatasetReferenceType = string; - -// @public -export type DatasetResource = AzureEntityResource & { - properties: DatasetUnion; -}; - -// @public -export interface DatasetSchemaDataElement { - [property: string]: 
any; - name?: any; - type?: any; -} - -// @public -export interface DatasetStorageFormat { - [property: string]: any; - deserializer?: any; - serializer?: any; - type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; -} - -// @public (undocumented) -export type DatasetStorageFormatUnion = TextFormat | JsonFormat | AvroFormat | OrcFormat | ParquetFormat; - -// @public (undocumented) -export type DatasetUnion = AvroDataset | ParquetDataset | DelimitedTextDataset | JsonDataset | OrcDataset | BinaryDataset | AzureTableDataset | AzureSqlTableDataset | AzureSqlMITableDataset | AzureSqlDWTableDataset | CassandraTableDataset | CustomDataset | CosmosDbSqlApiCollectionDataset | DocumentDbCollectionDataset | DynamicsEntityDataset | DynamicsCrmEntityDataset | CommonDataServiceForAppsEntityDataset | Office365Dataset | MongoDbCollectionDataset | MongoDbV2CollectionDataset | CosmosDbMongoDbApiCollectionDataset | ODataResourceDataset | OracleTableDataset | TeradataTableDataset | AzureMySqlTableDataset | AmazonRedshiftTableDataset | Db2TableDataset | RelationalTableDataset | InformixTableDataset | OdbcTableDataset | MySqlTableDataset | PostgreSqlTableDataset | MicrosoftAccessTableDataset | SalesforceObjectDataset | SalesforceServiceCloudObjectDataset | SybaseTableDataset | SapBwCubeDataset | SapCloudForCustomerResourceDataset | SapEccResourceDataset | SapHanaTableDataset | SapOpenHubTableDataset | SqlServerTableDataset | RestResourceDataset | SapTableResourceDataset | WebTableDataset | AzureSearchIndexDataset | AmazonMWSObjectDataset | AzurePostgreSqlTableDataset | ConcurObjectDataset | CouchbaseTableDataset | DrillTableDataset | EloquaObjectDataset | GoogleBigQueryObjectDataset | GreenplumTableDataset | HBaseObjectDataset | HiveObjectDataset | HubspotObjectDataset | ImpalaObjectDataset | JiraObjectDataset | MagentoObjectDataset | MariaDBTableDataset | AzureMariaDBTableDataset | MarketoObjectDataset | PaypalObjectDataset | PhoenixObjectDataset | 
PrestoObjectDataset | QuickBooksObjectDataset | ServiceNowObjectDataset | ShopifyObjectDataset | SparkObjectDataset | SquareObjectDataset | XeroObjectDataset | ZohoObjectDataset | NetezzaTableDataset | VerticaTableDataset | SalesforceMarketingCloudObjectDataset | ResponsysObjectDataset | DynamicsAXResourceDataset | OracleServiceCloudObjectDataset | AzureDataExplorerTableDataset | GoogleAdWordsObjectDataset; - -// @public -export type DatasetZipDeflateCompression = DatasetCompression & { - level?: DatasetCompressionLevel; -}; - -// @public -export type DayOfWeek = "Sunday" | "Monday" | "Tuesday" | "Wednesday" | "Thursday" | "Friday" | "Saturday"; - -// @public -export type Db2AuthenticationType = string; - -// @public -export type Db2LinkedService = LinkedService & { - server: any; - database: any; - authenticationType?: Db2AuthenticationType; - username?: any; - password?: SecretBaseUnion; - packageCollection?: any; - certificateCommonName?: any; - encryptedCredential?: any; -}; - -// @public -export type Db2Source = TabularSource & { - query?: any; -}; - -// @public -export type Db2TableDataset = Dataset & { - tableName?: any; - schemaTypePropertiesSchema?: any; - table?: any; -}; - -// @public -export type DeleteActivity = ExecutionActivity & { - recursive?: any; - maxConcurrentConnections?: number; - enableLogging?: any; - logStorageSettings?: LogStorageSettings; - dataset: DatasetReference; -}; - -// @public -export interface DeleteDataFlowDebugSessionRequest { - dataFlowName?: string; - sessionId?: string; -} - -// @public -export type DelimitedTextCompressionCodec = string; - -// @public -export type DelimitedTextDataset = Dataset & { - location?: DatasetLocationUnion; - columnDelimiter?: any; - rowDelimiter?: any; - encodingName?: any; - compressionCodec?: DelimitedTextCompressionCodec; - compressionLevel?: DatasetCompressionLevel; - quoteChar?: any; - escapeChar?: any; - firstRowAsHeader?: any; - nullValue?: any; -}; - -// @public -export type 
DelimitedTextReadSettings = FormatReadSettings & { - skipLineCount?: any; -}; - -// @public -export type DelimitedTextSink = CopySink & { - storeSettings?: StoreWriteSettingsUnion; - formatSettings?: DelimitedTextWriteSettings; -}; - -// @public -export type DelimitedTextSource = CopySource & { - storeSettings?: StoreReadSettingsUnion; - formatSettings?: DelimitedTextReadSettings; -}; - -// @public -export type DelimitedTextWriteSettings = FormatWriteSettings & { - quoteAllText?: any; - fileExtension: any; -}; - -// @public -export type DependencyCondition = string; - -// @public -export interface DependencyReference { - type: "TriggerDependencyReference" | "TumblingWindowTriggerDependencyReference" | "SelfDependencyTumblingWindowTriggerReference"; -} - -// @public (undocumented) -export type DependencyReferenceUnion = TriggerDependencyReferenceUnion | SelfDependencyTumblingWindowTriggerReference; - -// @public -export interface DistcpSettings { - distcpOptions?: any; - resourceManagerEndpoint: any; - tempScriptPath: any; -} - -// @public -export type DocumentDbCollectionDataset = Dataset & { - collectionName: any; -}; - -// @public -export type DocumentDbCollectionSink = CopySink & { - nestingSeparator?: any; - writeBehavior?: any; -}; - -// @public -export type DocumentDbCollectionSource = CopySource & { - query?: any; - nestingSeparator?: any; - queryTimeout?: any; -}; - -// @public -export type DrillLinkedService = LinkedService & { - connectionString?: any; - pwd?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type DrillSource = TabularSource & { - query?: any; -}; - -// @public -export type DrillTableDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export interface DWCopyCommandDefaultValue { - columnName?: any; - defaultValue?: any; -} - -// @public -export interface DWCopyCommandSettings { - additionalOptions?: { - [propertyName: string]: string; - }; - 
defaultValues?: DWCopyCommandDefaultValue[]; -} - -// @public -export type DynamicsAuthenticationType = string; - -// @public -export type DynamicsAXLinkedService = LinkedService & { - url: any; - servicePrincipalId: any; - servicePrincipalKey: SecretBaseUnion; - tenant: any; - aadResourceId: any; - encryptedCredential?: any; -}; - -// @public -export type DynamicsAXResourceDataset = Dataset & { - path: any; -}; - -// @public -export type DynamicsAXSource = TabularSource & { - query?: any; -}; - -// @public -export type DynamicsCrmEntityDataset = Dataset & { - entityName?: any; -}; - -// @public -export type DynamicsCrmLinkedService = LinkedService & { - deploymentType: DynamicsDeploymentType; - hostName?: any; - port?: any; - serviceUri?: any; - organizationName?: any; - authenticationType: DynamicsAuthenticationType; - username?: any; - password?: SecretBaseUnion; - servicePrincipalId?: any; - servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; - servicePrincipalCredential?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type DynamicsCrmSink = CopySink & { - writeBehavior: DynamicsSinkWriteBehavior; - ignoreNullValues?: any; - alternateKeyName?: any; -}; - -// @public -export type DynamicsCrmSource = CopySource & { - query?: any; -}; - -// @public -export type DynamicsDeploymentType = string; - -// @public -export type DynamicsEntityDataset = Dataset & { - entityName?: any; -}; - -// @public -export type DynamicsLinkedService = LinkedService & { - deploymentType: DynamicsDeploymentType; - hostName?: string; - port?: string; - serviceUri?: string; - organizationName?: string; - authenticationType: DynamicsAuthenticationType; - username?: any; - password?: SecretBaseUnion; - servicePrincipalId?: any; - servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; - servicePrincipalCredential?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type DynamicsServicePrincipalCredentialType = 
string; - -// @public -export type DynamicsSink = CopySink & { - writeBehavior: DynamicsSinkWriteBehavior; - ignoreNullValues?: any; - alternateKeyName?: any; -}; - -// @public -export type DynamicsSinkWriteBehavior = string; - -// @public -export type DynamicsSource = CopySource & { - query?: any; -}; - -// @public -export type EloquaLinkedService = LinkedService & { - endpoint: any; - username: any; - password?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type EloquaObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type EloquaSource = TabularSource & { - query?: any; -}; - -// @public -export interface EncryptionDetails { - cmk?: CustomerManagedKeyDetails; - readonly doubleEncryptionEnabled?: boolean; -} - -// @public -export interface EntityReference { - referenceName?: string; - type?: IntegrationRuntimeEntityReferenceType; -} - -// @public -export interface ErrorAdditionalInfo { - readonly info?: any; - readonly type?: string; -} - -// @public -export interface ErrorContract { - error?: ErrorResponse; -} - -// @public -export interface ErrorResponse { - readonly additionalInfo?: ErrorAdditionalInfo[]; - readonly code?: string; - readonly details?: ErrorResponse[]; - readonly message?: string; - readonly target?: string; -} - -// @public -export interface EvaluateDataFlowExpressionRequest { - dataFlowName?: string; - expression?: string; - rowLimits?: number; - sessionId?: string; - streamName?: string; -} - -// @public -export type EventSubscriptionStatus = string; - -// @public -export type ExecuteDataFlowActivity = ExecutionActivity & { - dataFlow: DataFlowReference; - staging?: DataFlowStagingInfo; - integrationRuntime?: IntegrationRuntimeReference; - compute?: ExecuteDataFlowActivityTypePropertiesCompute; -}; - -// @public -export interface ExecuteDataFlowActivityTypePropertiesCompute { - computeType?: 
DataFlowComputeType; - coreCount?: number; -} - -// @public -export type ExecutePipelineActivity = Activity & { - pipeline: PipelineReference; - parameters?: { - [propertyName: string]: any; - }; - waitOnCompletion?: boolean; -}; - -// @public -export type ExecuteSsisPackageActivity = ExecutionActivity & { - packageLocation: SsisPackageLocation; - runtime?: any; - loggingLevel?: any; - environmentPath?: any; - executionCredential?: SsisExecutionCredential; - connectVia: IntegrationRuntimeReference; - projectParameters?: { - [propertyName: string]: SsisExecutionParameter; - }; - packageParameters?: { - [propertyName: string]: SsisExecutionParameter; - }; - projectConnectionManagers?: { - [propertyName: string]: any; - }; - packageConnectionManagers?: { - [propertyName: string]: any; - }; - propertyOverrides?: { - [propertyName: string]: SsisPropertyOverride; - }; - logLocation?: SsisLogLocation; -}; - -// @public -export type ExecutionActivity = Activity & { - linkedServiceName?: LinkedServiceReference; - policy?: ActivityPolicy; -}; - -// @public (undocumented) -export type ExecutionActivityUnion = CopyActivity | HDInsightHiveActivity | HDInsightPigActivity | HDInsightMapReduceActivity | HDInsightStreamingActivity | HDInsightSparkActivity | ExecuteSsisPackageActivity | CustomActivity | SqlServerStoredProcedureActivity | DeleteActivity | AzureDataExplorerCommandActivity | LookupActivity | WebActivity | GetMetadataActivity | AzureMLBatchExecutionActivity | AzureMLUpdateResourceActivity | AzureMLExecutePipelineActivity | DataLakeAnalyticsUsqlActivity | DatabricksNotebookActivity | DatabricksSparkJarActivity | DatabricksSparkPythonActivity | AzureFunctionActivity | ExecuteDataFlowActivity | SynapseNotebookActivity | SynapseSparkJobDefinitionActivity; - -// @public -export interface ExposureControlRequest { - featureName?: string; - featureType?: string; -} - -// @public -export interface ExposureControlResponse { - readonly featureName?: string; - readonly value?: 
string; -} - -// @public -export interface Expression { - type: ExpressionType; - value: string; -} - -// @public -export type ExpressionType = string; - -// @public -export type FileServerLinkedService = LinkedService & { - host: any; - userId?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type FileServerLocation = DatasetLocation & {}; - -// @public -export type FileServerReadSettings = StoreReadSettings & { - recursive?: any; - wildcardFolderPath?: any; - wildcardFileName?: any; - enablePartitionDiscovery?: boolean; - modifiedDatetimeStart?: any; - modifiedDatetimeEnd?: any; -}; - -// @public -export type FileServerWriteSettings = StoreWriteSettings & {}; - -// @public -export type FileSystemSink = CopySink & { - copyBehavior?: any; -}; - -// @public -export type FileSystemSource = CopySource & { - recursive?: any; -}; - -// @public -export type FilterActivity = Activity & { - items: Expression; - condition: Expression; -}; - -// @public -export type ForEachActivity = Activity & { - isSequential?: boolean; - batchCount?: number; - items: Expression; - activities: ActivityUnion[]; -}; - -// @public -export interface FormatReadSettings { - [property: string]: any; - type: "DelimitedTextReadSettings"; -} - -// @public (undocumented) -export type FormatReadSettingsUnion = DelimitedTextReadSettings; - -// @public -export interface FormatWriteSettings { - [property: string]: any; - type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; -} - -// @public (undocumented) -export type FormatWriteSettingsUnion = AvroWriteSettings | DelimitedTextWriteSettings | JsonWriteSettings; - -// @public -export type FtpAuthenticationType = string; - -// @public -export type FtpReadSettings = StoreReadSettings & { - recursive?: any; - wildcardFolderPath?: any; - wildcardFileName?: any; - useBinaryTransfer?: boolean; -}; - -// @public -export type FtpServerLinkedService = LinkedService & { - host: any; - port?: any; - 
authenticationType?: FtpAuthenticationType; - userName?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; - enableSsl?: any; - enableServerCertificateValidation?: any; -}; - -// @public -export type FtpServerLocation = DatasetLocation & {}; - -// @public -export type GetMetadataActivity = ExecutionActivity & { - dataset: DatasetReference; - fieldList?: any[]; -}; - -// @public -export interface GetSsisObjectMetadataRequest { - metadataPath?: string; -} - -// @public (undocumented) -export interface GitHubAccessTokenRequest { - gitHubAccessCode: string; - gitHubAccessTokenBaseUrl: string; - gitHubClientId: string; -} - -// @public (undocumented) -export interface GitHubAccessTokenResponse { - // (undocumented) - gitHubAccessToken?: string; -} - -// @public -export type GoogleAdWordsAuthenticationType = string; - -// @public -export type GoogleAdWordsLinkedService = LinkedService & { - clientCustomerID: any; - developerToken: SecretBaseUnion; - authenticationType: GoogleAdWordsAuthenticationType; - refreshToken?: SecretBaseUnion; - clientId?: any; - clientSecret?: SecretBaseUnion; - email?: any; - keyFilePath?: any; - trustedCertPath?: any; - useSystemTrustStore?: any; - encryptedCredential?: any; -}; - -// @public -export type GoogleAdWordsObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type GoogleAdWordsSource = TabularSource & { - query?: any; -}; - -// @public -export type GoogleBigQueryAuthenticationType = string; - -// @public -export type GoogleBigQueryLinkedService = LinkedService & { - project: any; - additionalProjects?: any; - requestGoogleDriveScope?: any; - authenticationType: GoogleBigQueryAuthenticationType; - refreshToken?: SecretBaseUnion; - clientId?: any; - clientSecret?: SecretBaseUnion; - email?: any; - keyFilePath?: any; - trustedCertPath?: any; - useSystemTrustStore?: any; - encryptedCredential?: any; -}; - -// @public -export type GoogleBigQueryObjectDataset = Dataset & { - tableName?: any; - table?: 
any; - dataset?: any; -}; - -// @public -export type GoogleBigQuerySource = TabularSource & { - query?: any; -}; - -// @public -export type GoogleCloudStorageLinkedService = LinkedService & { - accessKeyId?: any; - secretAccessKey?: SecretBaseUnion; - serviceUrl?: any; - encryptedCredential?: any; -}; - -// @public -export type GoogleCloudStorageLocation = DatasetLocation & { - bucketName?: any; - version?: any; -}; - -// @public -export type GoogleCloudStorageReadSettings = StoreReadSettings & { - recursive?: any; - wildcardFolderPath?: any; - wildcardFileName?: any; - prefix?: any; - enablePartitionDiscovery?: boolean; - modifiedDatetimeStart?: any; - modifiedDatetimeEnd?: any; -}; - -// @public -export type GreenplumLinkedService = LinkedService & { - connectionString?: any; - pwd?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type GreenplumSource = TabularSource & { - query?: any; -}; - -// @public -export type GreenplumTableDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export type HBaseAuthenticationType = string; - -// @public -export type HBaseLinkedService = LinkedService & { - host: any; - port?: any; - httpPath?: any; - authenticationType: HBaseAuthenticationType; - username?: any; - password?: SecretBaseUnion; - enableSsl?: any; - trustedCertPath?: any; - allowHostNameCNMismatch?: any; - allowSelfSignedServerCert?: any; - encryptedCredential?: any; -}; - -// @public -export type HBaseObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type HBaseSource = TabularSource & { - query?: any; -}; - -// @public -export type HdfsLinkedService = LinkedService & { - url: any; - authenticationType?: any; - encryptedCredential?: any; - userName?: any; - password?: SecretBaseUnion; -}; - -// @public -export type HdfsLocation = DatasetLocation & {}; - -// @public -export type HdfsReadSettings = StoreReadSettings & { - recursive?: any; - 
wildcardFolderPath?: any; - wildcardFileName?: any; - enablePartitionDiscovery?: boolean; - modifiedDatetimeStart?: any; - modifiedDatetimeEnd?: any; - distcpSettings?: DistcpSettings; -}; - -// @public -export type HdfsSource = CopySource & { - recursive?: any; - distcpSettings?: DistcpSettings; -}; - -// @public -export type HdiNodeTypes = string; - -// @public -export type HDInsightActivityDebugInfoOption = string; - -// @public -export type HDInsightHiveActivity = ExecutionActivity & { - storageLinkedServices?: LinkedServiceReference[]; - arguments?: any[]; - getDebugInfo?: HDInsightActivityDebugInfoOption; - scriptPath?: any; - scriptLinkedService?: LinkedServiceReference; - defines?: { - [propertyName: string]: any; - }; - variables?: any[]; - queryTimeout?: number; -}; - -// @public -export type HDInsightLinkedService = LinkedService & { - clusterUri: any; - userName?: any; - password?: SecretBaseUnion; - linkedServiceName?: LinkedServiceReference; - hcatalogLinkedServiceName?: LinkedServiceReference; - encryptedCredential?: any; - isEspEnabled?: any; - fileSystem?: any; -}; - -// @public -export type HDInsightMapReduceActivity = ExecutionActivity & { - storageLinkedServices?: LinkedServiceReference[]; - arguments?: any[]; - getDebugInfo?: HDInsightActivityDebugInfoOption; - className: any; - jarFilePath: any; - jarLinkedService?: LinkedServiceReference; - jarLibs?: any[]; - defines?: { - [propertyName: string]: any; - }; -}; - -// @public -export type HDInsightOnDemandLinkedService = LinkedService & { - clusterSize: any; - timeToLive: any; - version: any; - linkedServiceName: LinkedServiceReference; - hostSubscriptionId: any; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant: any; - clusterResourceGroup: any; - clusterNamePrefix?: any; - clusterUserName?: any; - clusterPassword?: SecretBaseUnion; - clusterSshUserName?: any; - clusterSshPassword?: SecretBaseUnion; - additionalLinkedServiceNames?: LinkedServiceReference[]; - 
hcatalogLinkedServiceName?: LinkedServiceReference; - clusterType?: any; - sparkVersion?: any; - coreConfiguration?: any; - hBaseConfiguration?: any; - hdfsConfiguration?: any; - hiveConfiguration?: any; - mapReduceConfiguration?: any; - oozieConfiguration?: any; - stormConfiguration?: any; - yarnConfiguration?: any; - encryptedCredential?: any; - headNodeSize?: any; - dataNodeSize?: any; - zookeeperNodeSize?: any; - scriptActions?: ScriptAction[]; - virtualNetworkId?: any; - subnetName?: any; -}; - -// @public -export type HDInsightPigActivity = ExecutionActivity & { - storageLinkedServices?: LinkedServiceReference[]; - arguments?: any; - getDebugInfo?: HDInsightActivityDebugInfoOption; - scriptPath?: any; - scriptLinkedService?: LinkedServiceReference; - defines?: { - [propertyName: string]: any; - }; -}; - -// @public -export type HDInsightSparkActivity = ExecutionActivity & { - rootPath: any; - entryFilePath: any; - arguments?: any[]; - getDebugInfo?: HDInsightActivityDebugInfoOption; - sparkJobLinkedService?: LinkedServiceReference; - className?: string; - proxyUser?: any; - sparkConfig?: { - [propertyName: string]: any; - }; -}; - -// @public -export type HDInsightStreamingActivity = ExecutionActivity & { - storageLinkedServices?: LinkedServiceReference[]; - arguments?: any[]; - getDebugInfo?: HDInsightActivityDebugInfoOption; - mapper: any; - reducer: any; - input: any; - output: any; - filePaths: any[]; - fileLinkedService?: LinkedServiceReference; - combiner?: any; - commandEnvironment?: any[]; - defines?: { - [propertyName: string]: any; - }; -}; - -// @public -export type HiveAuthenticationType = string; - -// @public -export type HiveLinkedService = LinkedService & { - host: any; - port?: any; - serverType?: HiveServerType; - thriftTransportProtocol?: HiveThriftTransportProtocol; - authenticationType: HiveAuthenticationType; - serviceDiscoveryMode?: any; - zooKeeperNameSpace?: any; - useNativeQuery?: any; - username?: any; - password?: SecretBaseUnion; 
- httpPath?: any; - enableSsl?: any; - trustedCertPath?: any; - useSystemTrustStore?: any; - allowHostNameCNMismatch?: any; - allowSelfSignedServerCert?: any; - encryptedCredential?: any; -}; - -// @public -export type HiveObjectDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export type HiveServerType = string; - -// @public -export type HiveSource = TabularSource & { - query?: any; -}; - -// @public -export type HiveThriftTransportProtocol = string; - -// @public -export type HttpAuthenticationType = string; - -// @public -export type HttpLinkedService = LinkedService & { - url: any; - authenticationType?: HttpAuthenticationType; - userName?: any; - password?: SecretBaseUnion; - embeddedCertData?: any; - certThumbprint?: any; - encryptedCredential?: any; - enableServerCertificateValidation?: any; -}; - -// @public -export type HttpReadSettings = StoreReadSettings & { - requestMethod?: any; - requestBody?: any; - additionalHeaders?: any; - requestTimeout?: any; -}; - -// @public -export type HttpServerLocation = DatasetLocation & { - relativeUrl?: any; -}; - -// @public -export type HttpSource = CopySource & { - httpRequestTimeout?: any; -}; - -// @public -export type HubspotLinkedService = LinkedService & { - clientId: any; - clientSecret?: SecretBaseUnion; - accessToken?: SecretBaseUnion; - refreshToken?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type HubspotObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type HubspotSource = TabularSource & { - query?: any; -}; - -// @public -export type IfConditionActivity = Activity & { - expression: Expression; - ifTrueActivities?: ActivityUnion[]; - ifFalseActivities?: ActivityUnion[]; -}; - -// @public -export type ImpalaAuthenticationType = string; - -// @public -export type ImpalaLinkedService = LinkedService & { - 
host: any; - port?: any; - authenticationType: ImpalaAuthenticationType; - username?: any; - password?: SecretBaseUnion; - enableSsl?: any; - trustedCertPath?: any; - useSystemTrustStore?: any; - allowHostNameCNMismatch?: any; - allowSelfSignedServerCert?: any; - encryptedCredential?: any; -}; - -// @public -export type ImpalaObjectDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export type ImpalaSource = TabularSource & { - query?: any; -}; - -// @public -export type InformixLinkedService = LinkedService & { - connectionString: any; - authenticationType?: any; - credential?: SecretBaseUnion; - userName?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type InformixSink = CopySink & { - preCopyScript?: any; -}; - -// @public -export type InformixSource = TabularSource & { - query?: any; -}; - -// @public -export type InformixTableDataset = Dataset & { - tableName?: any; -}; - -// @public -export interface IntegrationRuntime { - [property: string]: any; - description?: string; - type: "Managed" | "SelfHosted"; -} - -// @public -export interface IntegrationRuntimeComputeProperties { - [property: string]: any; - dataFlowProperties?: IntegrationRuntimeDataFlowProperties; - location?: string; - maxParallelExecutionsPerNode?: number; - nodeSize?: string; - numberOfNodes?: number; - vNetProperties?: IntegrationRuntimeVNetProperties; -} - -// @public -export interface IntegrationRuntimeCustomSetupScriptProperties { - blobContainerUri?: string; - sasToken?: SecureString; -} - -// @public -export interface IntegrationRuntimeDataFlowProperties { - [property: string]: any; - computeType?: DataFlowComputeType; - coreCount?: number; - timeToLive?: number; -} - -// @public -export interface IntegrationRuntimeDataProxyProperties { - connectVia?: EntityReference; - path?: string; - stagingLinkedService?: EntityReference; -} - -// @public -export type IntegrationRuntimeEdition = 
string; - -// @public -export type IntegrationRuntimeEntityReferenceType = string; - -// @public -export type IntegrationRuntimeLicenseType = string; - -// @public -export interface IntegrationRuntimeListResponse { - nextLink?: string; - value: IntegrationRuntimeResource[]; -} - -// @public -export interface IntegrationRuntimeReference { - parameters?: { - [propertyName: string]: any; - }; - referenceName: string; - type: IntegrationRuntimeReferenceType; -} - -// @public -export type IntegrationRuntimeReferenceType = string; - -// @public -export type IntegrationRuntimeResource = AzureEntityResource & { - properties: IntegrationRuntimeUnion; -}; - -// @public -export type IntegrationRuntimesGetResponse = IntegrationRuntimeResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: IntegrationRuntimeResource; - }; -}; - -// @public -export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: IntegrationRuntimeListResponse; - }; -}; - -// @public -export interface IntegrationRuntimeSsisCatalogInfo { - [property: string]: any; - catalogAdminPassword?: SecureString; - catalogAdminUserName?: string; - catalogPricingTier?: IntegrationRuntimeSsisCatalogPricingTier; - catalogServerEndpoint?: string; -} - -// @public -export type IntegrationRuntimeSsisCatalogPricingTier = string; - -// @public -export interface IntegrationRuntimeSsisProperties { - [property: string]: any; - catalogInfo?: IntegrationRuntimeSsisCatalogInfo; - customSetupScriptProperties?: IntegrationRuntimeCustomSetupScriptProperties; - dataProxyProperties?: IntegrationRuntimeDataProxyProperties; - edition?: IntegrationRuntimeEdition; - expressCustomSetupProperties?: CustomSetupBase[]; - licenseType?: IntegrationRuntimeLicenseType; -} - -// @public -export type IntegrationRuntimeState = string; - -// @public -export type IntegrationRuntimeType = string; - -// @public (undocumented) 
-export type IntegrationRuntimeUnion = ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; - -// @public -export interface IntegrationRuntimeVNetProperties { - [property: string]: any; - publicIPs?: string[]; - subnet?: string; - vNetId?: string; -} - -// @public -export type JiraLinkedService = LinkedService & { - host: any; - port?: any; - username: any; - password?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type JiraObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type JiraSource = TabularSource & { - query?: any; -}; - -// @public -export type JsonDataset = Dataset & { - location?: DatasetLocationUnion; - encodingName?: any; - compression?: DatasetCompressionUnion; -}; - -// @public -export type JsonFormat = DatasetStorageFormat & { - filePattern?: JsonFormatFilePattern; - nestingSeparator?: any; - encodingName?: any; - jsonNodeReference?: any; - jsonPathDefinition?: any; -}; - -// @public -export type JsonFormatFilePattern = string; - -// @public -export type JsonSink = CopySink & { - storeSettings?: StoreWriteSettingsUnion; - formatSettings?: JsonWriteSettings; -}; - -// @public -export type JsonSource = CopySource & { - storeSettings?: StoreReadSettingsUnion; -}; - -// @public -export type JsonWriteFilePattern = string; - -// @public -export type JsonWriteSettings = FormatWriteSettings & { - filePattern?: JsonWriteFilePattern; -}; - -// @public -export const enum KnownAvroCompressionCodec { - // (undocumented) - Bzip2 = "bzip2", - // (undocumented) - Deflate = "deflate", - // (undocumented) - None = "none", - // (undocumented) - Snappy = "snappy", - // (undocumented) - Xz = "xz" -} - -// @public -export const enum KnownAzureFunctionActivityMethod { - // (undocumented) - Delete = "DELETE", - // (undocumented) - GET = "GET", - // (undocumented) - Head = "HEAD", - // (undocumented) - Options = "OPTIONS", - // 
(undocumented) - Post = "POST", - // (undocumented) - PUT = "PUT", - // (undocumented) - Trace = "TRACE" -} - -// @public -export const enum KnownAzureSearchIndexWriteBehaviorType { - // (undocumented) - Merge = "Merge", - // (undocumented) - Upload = "Upload" -} - -// @public -export const enum KnownBigDataPoolReferenceType { - // (undocumented) - BigDataPoolReference = "BigDataPoolReference" -} - -// @public -export const enum KnownBlobEventType { - // (undocumented) - MicrosoftStorageBlobCreated = "Microsoft.Storage.BlobCreated", - // (undocumented) - MicrosoftStorageBlobDeleted = "Microsoft.Storage.BlobDeleted" -} - -// @public -export const enum KnownCassandraSourceReadConsistencyLevels { - // (undocumented) - ALL = "ALL", - // (undocumented) - EachQuorum = "EACH_QUORUM", - // (undocumented) - LocalONE = "LOCAL_ONE", - // (undocumented) - LocalQuorum = "LOCAL_QUORUM", - // (undocumented) - LocalSerial = "LOCAL_SERIAL", - // (undocumented) - ONE = "ONE", - // (undocumented) - Quorum = "QUORUM", - // (undocumented) - Serial = "SERIAL", - // (undocumented) - Three = "THREE", - // (undocumented) - TWO = "TWO" -} - -// @public -export const enum KnownCellOutputType { - // (undocumented) - DisplayData = "display_data", - // (undocumented) - Error = "error", - // (undocumented) - ExecuteResult = "execute_result", - // (undocumented) - Stream = "stream" -} - -// @public -export const enum KnownCopyBehaviorType { - // (undocumented) - FlattenHierarchy = "FlattenHierarchy", - // (undocumented) - MergeFiles = "MergeFiles", - // (undocumented) - PreserveHierarchy = "PreserveHierarchy" -} - -// @public -export const enum KnownDataFlowComputeType { - // (undocumented) - ComputeOptimized = "ComputeOptimized", - // (undocumented) - General = "General", - // (undocumented) - MemoryOptimized = "MemoryOptimized" -} - -// @public -export const enum KnownDataFlowReferenceType { - // (undocumented) - DataFlowReference = "DataFlowReference" -} - -// @public -export const enum 
KnownDatasetCompressionLevel { - // (undocumented) - Fastest = "Fastest", - // (undocumented) - Optimal = "Optimal" -} - -// @public -export const enum KnownDatasetReferenceType { - // (undocumented) - DatasetReference = "DatasetReference" -} - -// @public -export const enum KnownDb2AuthenticationType { - // (undocumented) - Basic = "Basic" -} - -// @public -export const enum KnownDelimitedTextCompressionCodec { - // (undocumented) - Bzip2 = "bzip2", - // (undocumented) - Deflate = "deflate", - // (undocumented) - Gzip = "gzip", - // (undocumented) - Lz4 = "lz4", - // (undocumented) - Snappy = "snappy", - // (undocumented) - ZipDeflate = "zipDeflate" -} - -// @public -export const enum KnownDependencyCondition { - // (undocumented) - Completed = "Completed", - // (undocumented) - Failed = "Failed", - // (undocumented) - Skipped = "Skipped", - // (undocumented) - Succeeded = "Succeeded" -} - -// @public -export const enum KnownDynamicsAuthenticationType { - // (undocumented) - AADServicePrincipal = "AADServicePrincipal", - // (undocumented) - Ifd = "Ifd", - // (undocumented) - Office365 = "Office365" -} - -// @public -export const enum KnownDynamicsDeploymentType { - // (undocumented) - Online = "Online", - // (undocumented) - OnPremisesWithIfd = "OnPremisesWithIfd" -} - -// @public -export const enum KnownDynamicsServicePrincipalCredentialType { - // (undocumented) - ServicePrincipalCert = "ServicePrincipalCert", - // (undocumented) - ServicePrincipalKey = "ServicePrincipalKey" -} - -// @public -export const enum KnownDynamicsSinkWriteBehavior { - // (undocumented) - Upsert = "Upsert" -} - -// @public -export const enum KnownEventSubscriptionStatus { - // (undocumented) - Deprovisioning = "Deprovisioning", - // (undocumented) - Disabled = "Disabled", - // (undocumented) - Enabled = "Enabled", - // (undocumented) - Provisioning = "Provisioning", - // (undocumented) - Unknown = "Unknown" -} - -// @public -export const enum KnownExpressionType { - // (undocumented) - 
Expression = "Expression" -} - -// @public -export const enum KnownFtpAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - Basic = "Basic" -} - -// @public -export const enum KnownGoogleAdWordsAuthenticationType { - // (undocumented) - ServiceAuthentication = "ServiceAuthentication", - // (undocumented) - UserAuthentication = "UserAuthentication" -} - -// @public -export const enum KnownGoogleBigQueryAuthenticationType { - // (undocumented) - ServiceAuthentication = "ServiceAuthentication", - // (undocumented) - UserAuthentication = "UserAuthentication" -} - -// @public -export const enum KnownHBaseAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - Basic = "Basic" -} - -// @public -export const enum KnownHdiNodeTypes { - // (undocumented) - Headnode = "Headnode", - // (undocumented) - Workernode = "Workernode", - // (undocumented) - Zookeeper = "Zookeeper" -} - -// @public -export const enum KnownHDInsightActivityDebugInfoOption { - // (undocumented) - Always = "Always", - // (undocumented) - Failure = "Failure", - // (undocumented) - None = "None" -} - -// @public -export const enum KnownHiveAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - Username = "Username", - // (undocumented) - UsernameAndPassword = "UsernameAndPassword", - // (undocumented) - WindowsAzureHDInsightService = "WindowsAzureHDInsightService" -} - -// @public -export const enum KnownHiveServerType { - // (undocumented) - HiveServer1 = "HiveServer1", - // (undocumented) - HiveServer2 = "HiveServer2", - // (undocumented) - HiveThriftServer = "HiveThriftServer" -} - -// @public -export const enum KnownHiveThriftTransportProtocol { - // (undocumented) - Binary = "Binary", - // (undocumented) - Http = "HTTP ", - // (undocumented) - Sasl = "SASL" -} - -// @public -export const enum KnownHttpAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - Basic = 
"Basic", - // (undocumented) - ClientCertificate = "ClientCertificate", - // (undocumented) - Digest = "Digest", - // (undocumented) - Windows = "Windows" -} - -// @public -export const enum KnownImpalaAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - SaslUsername = "SASLUsername", - // (undocumented) - UsernameAndPassword = "UsernameAndPassword" -} - -// @public -export const enum KnownIntegrationRuntimeEdition { - // (undocumented) - Enterprise = "Enterprise", - // (undocumented) - Standard = "Standard" -} - -// @public -export const enum KnownIntegrationRuntimeEntityReferenceType { - // (undocumented) - IntegrationRuntimeReference = "IntegrationRuntimeReference", - // (undocumented) - LinkedServiceReference = "LinkedServiceReference" -} - -// @public -export const enum KnownIntegrationRuntimeLicenseType { - // (undocumented) - BasePrice = "BasePrice", - // (undocumented) - LicenseIncluded = "LicenseIncluded" -} - -// @public -export const enum KnownIntegrationRuntimeReferenceType { - // (undocumented) - IntegrationRuntimeReference = "IntegrationRuntimeReference" -} - -// @public -export const enum KnownIntegrationRuntimeSsisCatalogPricingTier { - // (undocumented) - Basic = "Basic", - // (undocumented) - Premium = "Premium", - // (undocumented) - PremiumRS = "PremiumRS", - // (undocumented) - Standard = "Standard" -} - -// @public -export const enum KnownIntegrationRuntimeState { - // (undocumented) - AccessDenied = "AccessDenied", - // (undocumented) - Initial = "Initial", - // (undocumented) - Limited = "Limited", - // (undocumented) - NeedRegistration = "NeedRegistration", - // (undocumented) - Offline = "Offline", - // (undocumented) - Online = "Online", - // (undocumented) - Started = "Started", - // (undocumented) - Starting = "Starting", - // (undocumented) - Stopped = "Stopped", - // (undocumented) - Stopping = "Stopping" -} - -// @public -export const enum KnownIntegrationRuntimeType { - // (undocumented) - 
Managed = "Managed", - // (undocumented) - SelfHosted = "SelfHosted" -} - -// @public -export const enum KnownJsonFormatFilePattern { - // (undocumented) - ArrayOfObjects = "arrayOfObjects", - // (undocumented) - SetOfObjects = "setOfObjects" -} - -// @public -export const enum KnownJsonWriteFilePattern { - // (undocumented) - ArrayOfObjects = "arrayOfObjects", - // (undocumented) - SetOfObjects = "setOfObjects" -} - -// @public -export const enum KnownMongoDbAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - Basic = "Basic" -} - -// @public -export const enum KnownNetezzaPartitionOption { - // (undocumented) - DataSlice = "DataSlice", - // (undocumented) - DynamicRange = "DynamicRange", - // (undocumented) - None = "None" -} - -// @public -export const enum KnownNodeSize { - // (undocumented) - Large = "Large", - // (undocumented) - Medium = "Medium", - // (undocumented) - None = "None", - // (undocumented) - Small = "Small", - // (undocumented) - XLarge = "XLarge", - // (undocumented) - XXLarge = "XXLarge", - // (undocumented) - XXXLarge = "XXXLarge" -} - -// @public -export const enum KnownNodeSizeFamily { - // (undocumented) - MemoryOptimized = "MemoryOptimized", - // (undocumented) - None = "None" -} - -// @public -export const enum KnownNotebookReferenceType { - // (undocumented) - NotebookReference = "NotebookReference" -} - -// @public -export const enum KnownODataAadServicePrincipalCredentialType { - // (undocumented) - ServicePrincipalCert = "ServicePrincipalCert", - // (undocumented) - ServicePrincipalKey = "ServicePrincipalKey" -} - -// @public -export const enum KnownODataAuthenticationType { - // (undocumented) - AadServicePrincipal = "AadServicePrincipal", - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - Basic = "Basic", - // (undocumented) - ManagedServiceIdentity = "ManagedServiceIdentity", - // (undocumented) - Windows = "Windows" -} - -// @public -export const enum 
KnownOraclePartitionOption { - // (undocumented) - DynamicRange = "DynamicRange", - // (undocumented) - None = "None", - // (undocumented) - PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable" -} - -// @public -export const enum KnownOrcCompressionCodec { - // (undocumented) - None = "none", - // (undocumented) - Snappy = "snappy", - // (undocumented) - Zlib = "zlib" -} - -// @public -export const enum KnownParameterType { - // (undocumented) - Array = "Array", - // (undocumented) - Bool = "Bool", - // (undocumented) - Float = "Float", - // (undocumented) - Int = "Int", - // (undocumented) - Object = "Object", - // (undocumented) - SecureString = "SecureString", - // (undocumented) - String = "String" -} - -// @public -export const enum KnownParquetCompressionCodec { - // (undocumented) - Gzip = "gzip", - // (undocumented) - Lzo = "lzo", - // (undocumented) - None = "none", - // (undocumented) - Snappy = "snappy" -} - -// @public -export const enum KnownPhoenixAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - UsernameAndPassword = "UsernameAndPassword", - // (undocumented) - WindowsAzureHDInsightService = "WindowsAzureHDInsightService" -} - -// @public -export const enum KnownPipelineReferenceType { - // (undocumented) - PipelineReference = "PipelineReference" -} - -// @public -export const enum KnownPluginCurrentState { - // (undocumented) - Cleanup = "Cleanup", - // (undocumented) - Ended = "Ended", - // (undocumented) - Monitoring = "Monitoring", - // (undocumented) - Preparation = "Preparation", - // (undocumented) - Queued = "Queued", - // (undocumented) - ResourceAcquisition = "ResourceAcquisition", - // (undocumented) - Submission = "Submission" -} - -// @public -export const enum KnownPolybaseSettingsRejectType { - // (undocumented) - Percentage = "percentage", - // (undocumented) - Value = "value" -} - -// @public -export const enum KnownPrestoAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - 
// (undocumented) - Ldap = "LDAP" -} - -// @public -export const enum KnownRecurrenceFrequency { - // (undocumented) - Day = "Day", - // (undocumented) - Hour = "Hour", - // (undocumented) - Minute = "Minute", - // (undocumented) - Month = "Month", - // (undocumented) - NotSpecified = "NotSpecified", - // (undocumented) - Week = "Week", - // (undocumented) - Year = "Year" -} - -// @public -export const enum KnownRestServiceAuthenticationType { - // (undocumented) - AadServicePrincipal = "AadServicePrincipal", - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - Basic = "Basic", - // (undocumented) - ManagedServiceIdentity = "ManagedServiceIdentity" -} - -// @public -export const enum KnownRunQueryFilterOperand { - // (undocumented) - ActivityName = "ActivityName", - // (undocumented) - ActivityRunEnd = "ActivityRunEnd", - // (undocumented) - ActivityRunStart = "ActivityRunStart", - // (undocumented) - ActivityType = "ActivityType", - // (undocumented) - LatestOnly = "LatestOnly", - // (undocumented) - PipelineName = "PipelineName", - // (undocumented) - RunEnd = "RunEnd", - // (undocumented) - RunGroupId = "RunGroupId", - // (undocumented) - RunStart = "RunStart", - // (undocumented) - Status = "Status", - // (undocumented) - TriggerName = "TriggerName", - // (undocumented) - TriggerRunTimestamp = "TriggerRunTimestamp" -} - -// @public -export const enum KnownRunQueryFilterOperator { - // (undocumented) - Equals = "Equals", - // (undocumented) - In = "In", - // (undocumented) - NotEquals = "NotEquals", - // (undocumented) - NotIn = "NotIn" -} - -// @public -export const enum KnownRunQueryOrder { - // (undocumented) - ASC = "ASC", - // (undocumented) - Desc = "DESC" -} - -// @public -export const enum KnownRunQueryOrderByField { - // (undocumented) - ActivityName = "ActivityName", - // (undocumented) - ActivityRunEnd = "ActivityRunEnd", - // (undocumented) - ActivityRunStart = "ActivityRunStart", - // (undocumented) - PipelineName = "PipelineName", - 
// (undocumented) - RunEnd = "RunEnd", - // (undocumented) - RunStart = "RunStart", - // (undocumented) - Status = "Status", - // (undocumented) - TriggerName = "TriggerName", - // (undocumented) - TriggerRunTimestamp = "TriggerRunTimestamp" -} - -// @public -export const enum KnownSalesforceSinkWriteBehavior { - // (undocumented) - Insert = "Insert", - // (undocumented) - Upsert = "Upsert" -} - -// @public -export const enum KnownSalesforceSourceReadBehavior { - // (undocumented) - Query = "Query", - // (undocumented) - QueryAll = "QueryAll" -} - -// @public -export const enum KnownSapCloudForCustomerSinkWriteBehavior { - // (undocumented) - Insert = "Insert", - // (undocumented) - Update = "Update" -} - -// @public -export const enum KnownSapHanaAuthenticationType { - // (undocumented) - Basic = "Basic", - // (undocumented) - Windows = "Windows" -} - -// @public -export const enum KnownSapHanaPartitionOption { - // (undocumented) - None = "None", - // (undocumented) - PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable", - // (undocumented) - SapHanaDynamicRange = "SapHanaDynamicRange" -} - -// @public -export const enum KnownSapTablePartitionOption { - // (undocumented) - None = "None", - // (undocumented) - PartitionOnCalendarDate = "PartitionOnCalendarDate", - // (undocumented) - PartitionOnCalendarMonth = "PartitionOnCalendarMonth", - // (undocumented) - PartitionOnCalendarYear = "PartitionOnCalendarYear", - // (undocumented) - PartitionOnInt = "PartitionOnInt", - // (undocumented) - PartitionOnTime = "PartitionOnTime" -} - -// @public -export const enum KnownSchedulerCurrentState { - // (undocumented) - Ended = "Ended", - // (undocumented) - Queued = "Queued", - // (undocumented) - Scheduled = "Scheduled" -} - -// @public -export const enum KnownServiceNowAuthenticationType { - // (undocumented) - Basic = "Basic", - // (undocumented) - OAuth2 = "OAuth2" -} - -// @public -export const enum KnownSftpAuthenticationType { - // (undocumented) - Basic = 
"Basic", - // (undocumented) - SshPublicKey = "SshPublicKey" -} - -// @public -export const enum KnownSparkAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - Username = "Username", - // (undocumented) - UsernameAndPassword = "UsernameAndPassword", - // (undocumented) - WindowsAzureHDInsightService = "WindowsAzureHDInsightService" -} - -// @public -export const enum KnownSparkBatchJobResultType { - // (undocumented) - Cancelled = "Cancelled", - // (undocumented) - Failed = "Failed", - // (undocumented) - Succeeded = "Succeeded", - // (undocumented) - Uncertain = "Uncertain" -} - -// @public -export const enum KnownSparkErrorSource { - // (undocumented) - Dependency = "Dependency", - // (undocumented) - System = "System", - // (undocumented) - Unknown = "Unknown", - // (undocumented) - User = "User" -} - -// @public -export const enum KnownSparkJobReferenceType { - // (undocumented) - SparkJobDefinitionReference = "SparkJobDefinitionReference" -} - -// @public -export const enum KnownSparkJobType { - // (undocumented) - SparkBatch = "SparkBatch", - // (undocumented) - SparkSession = "SparkSession" -} - -// @public -export const enum KnownSparkServerType { - // (undocumented) - SharkServer = "SharkServer", - // (undocumented) - SharkServer2 = "SharkServer2", - // (undocumented) - SparkThriftServer = "SparkThriftServer" -} - -// @public -export const enum KnownSparkThriftTransportProtocol { - // (undocumented) - Binary = "Binary", - // (undocumented) - Http = "HTTP ", - // (undocumented) - Sasl = "SASL" -} - -// @public -export const enum KnownSqlConnectionType { - // (undocumented) - SqlOnDemand = "SqlOnDemand", - // (undocumented) - SqlPool = "SqlPool" -} - -// @public -export const enum KnownSqlPoolReferenceType { - // (undocumented) - SqlPoolReference = "SqlPoolReference" -} - -// @public -export const enum KnownSqlScriptType { - // (undocumented) - SqlQuery = "SqlQuery" -} - -// @public -export const enum 
KnownSsisLogLocationType { - // (undocumented) - File = "File" -} - -// @public -export const enum KnownSsisPackageLocationType { - // (undocumented) - File = "File", - // (undocumented) - InlinePackage = "InlinePackage", - // (undocumented) - Ssisdb = "SSISDB" -} - -// @public -export const enum KnownStoredProcedureParameterType { - // (undocumented) - Boolean = "Boolean", - // (undocumented) - Date = "Date", - // (undocumented) - Decimal = "Decimal", - // (undocumented) - Guid = "Guid", - // (undocumented) - Int = "Int", - // (undocumented) - Int64 = "Int64", - // (undocumented) - String = "String" -} - -// @public -export const enum KnownSybaseAuthenticationType { - // (undocumented) - Basic = "Basic", - // (undocumented) - Windows = "Windows" -} - -// @public -export const enum KnownTeradataAuthenticationType { - // (undocumented) - Basic = "Basic", - // (undocumented) - Windows = "Windows" -} - -// @public -export const enum KnownTeradataPartitionOption { - // (undocumented) - DynamicRange = "DynamicRange", - // (undocumented) - Hash = "Hash", - // (undocumented) - None = "None" -} - -// @public -export const enum KnownTriggerReferenceType { - // (undocumented) - TriggerReference = "TriggerReference" -} - -// @public -export const enum KnownTriggerRunStatus { - // (undocumented) - Failed = "Failed", - // (undocumented) - Inprogress = "Inprogress", - // (undocumented) - Succeeded = "Succeeded" -} - -// @public -export const enum KnownTriggerRuntimeState { - // (undocumented) - Disabled = "Disabled", - // (undocumented) - Started = "Started", - // (undocumented) - Stopped = "Stopped" -} - -// @public -export const enum KnownTumblingWindowFrequency { - // (undocumented) - Hour = "Hour", - // (undocumented) - Minute = "Minute" -} - -// @public -export const enum KnownType { - // (undocumented) - LinkedServiceReference = "LinkedServiceReference" -} - -// @public -export const enum KnownVariableType { - // (undocumented) - Array = "Array", - // (undocumented) - Bool 
= "Bool", - // (undocumented) - Boolean = "Boolean", - // (undocumented) - String = "String" -} - -// @public -export const enum KnownWebActivityMethod { - // (undocumented) - Delete = "DELETE", - // (undocumented) - GET = "GET", - // (undocumented) - Post = "POST", - // (undocumented) - PUT = "PUT" -} - -// @public -export const enum KnownWebAuthenticationType { - // (undocumented) - Anonymous = "Anonymous", - // (undocumented) - Basic = "Basic", - // (undocumented) - ClientCertificate = "ClientCertificate" -} - -// @public -export const enum KnownWebHookActivityMethod { - // (undocumented) - Post = "POST" -} - -// @public -export interface LibraryRequirements { - content?: string; - filename?: string; - readonly time?: Date; -} - -// @public -export type LinkedIntegrationRuntimeKeyAuthorization = LinkedIntegrationRuntimeType & { - key: SecureString; -}; - -// @public -export type LinkedIntegrationRuntimeRbacAuthorization = LinkedIntegrationRuntimeType & { - resourceId: string; -}; - -// @public -export interface LinkedIntegrationRuntimeType { - authorizationType: "Key" | "RBAC"; -} - -// @public (undocumented) -export type LinkedIntegrationRuntimeTypeUnion = LinkedIntegrationRuntimeKeyAuthorization | LinkedIntegrationRuntimeRbacAuthorization; - -// @public -export interface LinkedService { - [property: string]: any; - annotations?: any[]; - connectVia?: IntegrationRuntimeReference; - description?: string; - parameters?: { - [propertyName: string]: ParameterSpecification; - }; - type: "AzureStorage" | "AzureBlobStorage" | "AzureTableStorage" | "AzureSqlDW" | "SqlServer" | "AzureSqlDatabase" | "AzureSqlMI" | "AzureBatch" | "AzureKeyVault" | "CosmosDb" | "Dynamics" | "DynamicsCrm" | "CommonDataServiceForApps" | "HDInsight" | "FileServer" | "AzureFileStorage" | "GoogleCloudStorage" | "Oracle" | "AzureMySql" | "MySql" | "PostgreSql" | "Sybase" | "Db2" | "Teradata" | "AzureML" | "AzureMLService" | "Odbc" | "Informix" | "MicrosoftAccess" | "Hdfs" | "OData" | "Web" | 
"Cassandra" | "MongoDb" | "MongoDbV2" | "CosmosDbMongoDbApi" | "AzureDataLakeStore" | "AzureBlobFS" | "Office365" | "Salesforce" | "SalesforceServiceCloud" | "SapCloudForCustomer" | "SapEcc" | "SapOpenHub" | "RestService" | "AmazonS3" | "AmazonRedshift" | "CustomDataSource" | "AzureSearch" | "HttpServer" | "FtpServer" | "Sftp" | "SapBW" | "SapHana" | "AmazonMWS" | "AzurePostgreSql" | "Concur" | "Couchbase" | "Drill" | "Eloqua" | "GoogleBigQuery" | "Greenplum" | "HBase" | "Hive" | "Hubspot" | "Impala" | "Jira" | "Magento" | "MariaDB" | "AzureMariaDB" | "Marketo" | "Paypal" | "Phoenix" | "Presto" | "QuickBooks" | "ServiceNow" | "Shopify" | "Spark" | "Square" | "Xero" | "Zoho" | "Vertica" | "Netezza" | "SalesforceMarketingCloud" | "HDInsightOnDemand" | "AzureDataLakeAnalytics" | "AzureDatabricks" | "Responsys" | "DynamicsAX" | "OracleServiceCloud" | "GoogleAdWords" | "SapTable" | "AzureDataExplorer" | "AzureFunction"; -} - -// @public -export interface LinkedServiceCreateOrUpdateLinkedServiceOptionalParams extends coreHttp.OperationOptions { - ifMatch?: string; -} - -// @public -export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: LinkedServiceResource; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export type LinkedServiceDebugResource = SubResourceDebugResource & { - properties: LinkedServiceUnion; -}; - -// @public -export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { - ifNoneMatch?: string; -} - -// @public -export type LinkedServiceGetLinkedServiceResponse = LinkedServiceResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: LinkedServiceResource; - }; -}; - -// @public -export type LinkedServiceGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: LinkedServiceListResponse; - }; 
-}; - -// @public -export type LinkedServiceGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: LinkedServiceListResponse; - }; -}; - -// @public -export interface LinkedServiceListResponse { - nextLink?: string; - value: LinkedServiceResource[]; -} - -// @public -export interface LinkedServiceReference { - parameters?: { - [propertyName: string]: any; - }; - referenceName: string; - type: Type; -} - -// @public -export type LinkedServiceResource = AzureEntityResource & { - properties: LinkedServiceUnion; -}; - -// @public (undocumented) -export type LinkedServiceUnion = AzureStorageLinkedService | AzureBlobStorageLinkedService | AzureTableStorageLinkedService | AzureSqlDWLinkedService | SqlServerLinkedService | AzureSqlDatabaseLinkedService | AzureSqlMILinkedService | AzureBatchLinkedService | AzureKeyVaultLinkedService | CosmosDbLinkedService | DynamicsLinkedService | DynamicsCrmLinkedService | CommonDataServiceForAppsLinkedService | HDInsightLinkedService | FileServerLinkedService | AzureFileStorageLinkedService | GoogleCloudStorageLinkedService | OracleLinkedService | AzureMySqlLinkedService | MySqlLinkedService | PostgreSqlLinkedService | SybaseLinkedService | Db2LinkedService | TeradataLinkedService | AzureMLLinkedService | AzureMLServiceLinkedService | OdbcLinkedService | InformixLinkedService | MicrosoftAccessLinkedService | HdfsLinkedService | ODataLinkedService | WebLinkedService | CassandraLinkedService | MongoDbLinkedService | MongoDbV2LinkedService | CosmosDbMongoDbApiLinkedService | AzureDataLakeStoreLinkedService | AzureBlobFSLinkedService | Office365LinkedService | SalesforceLinkedService | SalesforceServiceCloudLinkedService | SapCloudForCustomerLinkedService | SapEccLinkedService | SapOpenHubLinkedService | RestServiceLinkedService | AmazonS3LinkedService | AmazonRedshiftLinkedService | CustomDataSourceLinkedService | AzureSearchLinkedService | 
HttpLinkedService | FtpServerLinkedService | SftpServerLinkedService | SapBWLinkedService | SapHanaLinkedService | AmazonMWSLinkedService | AzurePostgreSqlLinkedService | ConcurLinkedService | CouchbaseLinkedService | DrillLinkedService | EloquaLinkedService | GoogleBigQueryLinkedService | GreenplumLinkedService | HBaseLinkedService | HiveLinkedService | HubspotLinkedService | ImpalaLinkedService | JiraLinkedService | MagentoLinkedService | MariaDBLinkedService | AzureMariaDBLinkedService | MarketoLinkedService | PaypalLinkedService | PhoenixLinkedService | PrestoLinkedService | QuickBooksLinkedService | ServiceNowLinkedService | ShopifyLinkedService | SparkLinkedService | SquareLinkedService | XeroLinkedService | ZohoLinkedService | VerticaLinkedService | NetezzaLinkedService | SalesforceMarketingCloudLinkedService | HDInsightOnDemandLinkedService | AzureDataLakeAnalyticsLinkedService | AzureDatabricksLinkedService | ResponsysLinkedService | DynamicsAXLinkedService | OracleServiceCloudLinkedService | GoogleAdWordsLinkedService | SapTableLinkedService | AzureDataExplorerLinkedService | AzureFunctionLinkedService; - -// @public -export interface LogStorageSettings { - [property: string]: any; - linkedServiceName: LinkedServiceReference; - path?: any; -} - -// @public -export type LookupActivity = ExecutionActivity & { - source: CopySourceUnion; - dataset: DatasetReference; - firstRowOnly?: any; -}; - -// @public -export type MagentoLinkedService = LinkedService & { - host: any; - accessToken?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type MagentoObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type MagentoSource = TabularSource & { - query?: any; -}; - -// @public -export interface ManagedIdentity { - readonly principalId?: string; - readonly tenantId?: string; - type?: ResourceIdentityType; -} - -// @public -export type 
ManagedIntegrationRuntime = IntegrationRuntime & { - readonly state?: IntegrationRuntimeState; - computeProperties?: IntegrationRuntimeComputeProperties; - ssisProperties?: IntegrationRuntimeSsisProperties; -}; - -// @public -export interface ManagedVirtualNetworkSettings { - allowedAadTenantIdsForLinking?: string[]; - linkedAccessCheckOnTargetResource?: boolean; - preventDataExfiltration?: boolean; -} - -// @public -export type MappingDataFlow = DataFlow & { - sources?: DataFlowSource[]; - sinks?: DataFlowSink[]; - transformations?: Transformation[]; - script?: string; -}; - -// @public -export type MariaDBLinkedService = LinkedService & { - connectionString?: any; - pwd?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type MariaDBSource = TabularSource & { - query?: any; -}; - -// @public -export type MariaDBTableDataset = Dataset & { - tableName?: any; -}; - -// @public -export type MarketoLinkedService = LinkedService & { - endpoint: any; - clientId: any; - clientSecret?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type MarketoObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type MarketoSource = TabularSource & { - query?: any; -}; - -// @public -export type MicrosoftAccessLinkedService = LinkedService & { - connectionString: any; - authenticationType?: any; - credential?: SecretBaseUnion; - userName?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type MicrosoftAccessSink = CopySink & { - preCopyScript?: any; -}; - -// @public -export type MicrosoftAccessSource = CopySource & { - query?: any; -}; - -// @public -export type MicrosoftAccessTableDataset = Dataset & { - tableName?: any; -}; - -// @public -export type MongoDbAuthenticationType = string; - -// @public -export type MongoDbCollectionDataset = Dataset & { - collectionName: any; -}; - 
-// @public -export interface MongoDbCursorMethodsProperties { - [property: string]: any; - limit?: any; - project?: any; - skip?: any; - sort?: any; -} - -// @public -export type MongoDbLinkedService = LinkedService & { - server: any; - authenticationType?: MongoDbAuthenticationType; - databaseName: any; - username?: any; - password?: SecretBaseUnion; - authSource?: any; - port?: any; - enableSsl?: any; - allowSelfSignedServerCert?: any; - encryptedCredential?: any; -}; - -// @public -export type MongoDbSource = CopySource & { - query?: any; -}; - -// @public -export type MongoDbV2CollectionDataset = Dataset & { - collection: any; -}; - -// @public -export type MongoDbV2LinkedService = LinkedService & { - connectionString: any; - database: any; -}; - -// @public -export type MongoDbV2Source = CopySource & { - filter?: any; - cursorMethods?: MongoDbCursorMethodsProperties; - batchSize?: any; - queryTimeout?: any; -}; - -// @public -export type MultiplePipelineTrigger = Trigger & { - pipelines?: TriggerPipelineReference[]; -}; - -// @public (undocumented) -export type MultiplePipelineTriggerUnion = ScheduleTrigger | BlobTrigger | BlobEventsTrigger; - -// @public -export type MySqlLinkedService = LinkedService & { - connectionString: any; - password?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type MySqlSource = TabularSource & { - query?: any; -}; - -// @public -export type MySqlTableDataset = Dataset & { - tableName?: any; -}; - -// @public -export type NetezzaLinkedService = LinkedService & { - connectionString?: any; - pwd?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type NetezzaPartitionOption = string; - -// @public -export interface NetezzaPartitionSettings { - partitionColumnName?: any; - partitionLowerBound?: any; - partitionUpperBound?: any; -} - -// @public -export type NetezzaSource = TabularSource & { - query?: any; - partitionOption?: NetezzaPartitionOption; - 
partitionSettings?: NetezzaPartitionSettings; -}; - -// @public -export type NetezzaTableDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export type NodeSize = string; - -// @public -export type NodeSizeFamily = string; - -// @public -export interface Notebook { - [property: string]: any; - bigDataPool?: BigDataPoolReference | null; - cells: NotebookCell[]; - description?: string; - metadata: NotebookMetadata; - nbformat: number; - nbformatMinor: number; - sessionProperties?: NotebookSessionProperties | null; -} - -// @public -export interface NotebookCell { - [property: string]: any; - attachments?: any; - cellType: string; - metadata: any; - outputs?: NotebookCellOutputItem[]; - source: string[]; -} - -// @public -export interface NotebookCellOutputItem { - data?: any; - executionCount?: number; - metadata?: any; - name?: string; - outputType: CellOutputType; - text?: any; -} - -// @public -export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { - ifMatch?: string; -} - -// @public -export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: NotebookResource; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { - ifNoneMatch?: string; -} - -// @public -export type NotebookGetNotebookResponse = NotebookResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: NotebookResource; - }; -}; - -// @public -export type NotebookGetNotebooksByWorkspaceNextResponse = NotebookListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: NotebookListResponse; - }; -}; - -// @public -export type NotebookGetNotebooksByWorkspaceResponse = NotebookListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: 
NotebookListResponse; - }; -}; - -// @public -export type NotebookGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: NotebookListResponse; - }; -}; - -// @public -export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: NotebookListResponse; - }; -}; - -// @public -export interface NotebookKernelSpec { - [property: string]: any; - displayName: string; - name: string; -} - -// @public -export interface NotebookLanguageInfo { - [property: string]: any; - codemirrorMode?: string; - name: string; -} - -// @public -export interface NotebookListResponse { - nextLink?: string; - value: NotebookResource[]; -} - -// @public -export interface NotebookMetadata { - [property: string]: any; - kernelspec?: NotebookKernelSpec; - languageInfo?: NotebookLanguageInfo; -} - -// @public -export type NotebookReferenceType = string; - -// @public -export interface NotebookResource { - readonly etag?: string; - readonly id?: string; - name: string; - properties: Notebook; - readonly type?: string; -} - -// @public -export interface NotebookSessionProperties { - driverCores: number; - driverMemory: string; - executorCores: number; - executorMemory: string; - numExecutors: number; -} - -// @public -export type ODataAadServicePrincipalCredentialType = string; - -// @public -export type ODataAuthenticationType = string; - -// @public -export type ODataLinkedService = LinkedService & { - url: any; - authenticationType?: ODataAuthenticationType; - userName?: any; - password?: SecretBaseUnion; - tenant?: any; - servicePrincipalId?: any; - aadResourceId?: any; - aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType; - servicePrincipalKey?: SecretBaseUnion; - servicePrincipalEmbeddedCert?: SecretBaseUnion; - servicePrincipalEmbeddedCertPassword?: SecretBaseUnion; - 
encryptedCredential?: any; -}; - -// @public -export type ODataResourceDataset = Dataset & { - path?: any; -}; - -// @public -export type ODataSource = CopySource & { - query?: any; -}; - -// @public -export type OdbcLinkedService = LinkedService & { - connectionString: any; - authenticationType?: any; - credential?: SecretBaseUnion; - userName?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type OdbcSink = CopySink & { - preCopyScript?: any; -}; - -// @public -export type OdbcSource = TabularSource & { - query?: any; -}; - -// @public -export type OdbcTableDataset = Dataset & { - tableName?: any; -}; - -// @public -export type Office365Dataset = Dataset & { - tableName: any; - predicate?: any; -}; - -// @public -export type Office365LinkedService = LinkedService & { - office365TenantId: any; - servicePrincipalTenantId: any; - servicePrincipalId: any; - servicePrincipalKey: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type Office365Source = CopySource & { - allowedGroups?: any; - userScopeFilterUri?: any; - dateFilterColumn?: any; - startTime?: any; - endTime?: any; - outputColumns?: any; -}; - -// @public -export type OracleLinkedService = LinkedService & { - connectionString: any; - password?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type OraclePartitionOption = string; - -// @public -export interface OraclePartitionSettings { - partitionColumnName?: any; - partitionLowerBound?: any; - partitionNames?: any; - partitionUpperBound?: any; -} - -// @public -export type OracleServiceCloudLinkedService = LinkedService & { - host: any; - username: any; - password: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type OracleServiceCloudObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type OracleServiceCloudSource = 
TabularSource & { - query?: any; -}; - -// @public -export type OracleSink = CopySink & { - preCopyScript?: any; -}; - -// @public -export type OracleSource = CopySource & { - oracleReaderQuery?: any; - queryTimeout?: any; - partitionOption?: OraclePartitionOption; - partitionSettings?: OraclePartitionSettings; -}; - -// @public -export type OracleTableDataset = Dataset & { - tableName?: any; - schemaTypePropertiesSchema?: any; - table?: any; -}; - -// @public -export type OrcCompressionCodec = string; - -// @public -export type OrcDataset = Dataset & { - location?: DatasetLocationUnion; - orcCompressionCodec?: OrcCompressionCodec; -}; - -// @public -export type OrcFormat = DatasetStorageFormat & {}; - -// @public -export type OrcSink = CopySink & { - storeSettings?: StoreWriteSettingsUnion; -}; - -// @public -export type OrcSource = CopySource & { - storeSettings?: StoreReadSettingsUnion; -}; - -// @public -export interface ParameterSpecification { - defaultValue?: any; - type: ParameterType; -} - -// @public -export type ParameterType = string; - -// @public -export type ParquetCompressionCodec = string; - -// @public -export type ParquetDataset = Dataset & { - location?: DatasetLocationUnion; - compressionCodec?: ParquetCompressionCodec; -}; - -// @public -export type ParquetFormat = DatasetStorageFormat & {}; - -// @public -export type ParquetSink = CopySink & { - storeSettings?: StoreWriteSettingsUnion; -}; - -// @public -export type ParquetSource = CopySource & { - storeSettings?: StoreReadSettingsUnion; -}; - -// @public -export type PaypalLinkedService = LinkedService & { - host: any; - clientId: any; - clientSecret?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type PaypalObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type PaypalSource = TabularSource & { - query?: any; -}; - -// @public -export type 
PhoenixAuthenticationType = string; - -// @public -export type PhoenixLinkedService = LinkedService & { - host: any; - port?: any; - httpPath?: any; - authenticationType: PhoenixAuthenticationType; - username?: any; - password?: SecretBaseUnion; - enableSsl?: any; - trustedCertPath?: any; - useSystemTrustStore?: any; - allowHostNameCNMismatch?: any; - allowSelfSignedServerCert?: any; - encryptedCredential?: any; -}; - -// @public -export type PhoenixObjectDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export type PhoenixSource = TabularSource & { - query?: any; -}; - -// @public -export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { - ifMatch?: string; -} - -// @public -export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: PipelineResource; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { - isRecovery?: boolean; - parameters?: { - [propertyName: string]: any; - }; - referencePipelineRunId?: string; - startActivityName?: string; -} - -// @public -export type PipelineCreatePipelineRunResponse = CreateRunResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: CreateRunResponse; - }; -}; - -// @public -export interface PipelineFolder { - name?: string; -} - -// @public -export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { - ifNoneMatch?: string; -} - -// @public -export type PipelineGetPipelineResponse = PipelineResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: PipelineResource; - }; -}; - -// @public -export type PipelineGetPipelinesByWorkspaceNextResponse = PipelineListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: 
PipelineListResponse; - }; -}; - -// @public -export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: PipelineListResponse; - }; -}; - -// @public -export interface PipelineListResponse { - nextLink?: string; - value: PipelineResource[]; -} - -// @public -export interface PipelineReference { - name?: string; - referenceName: string; - type: PipelineReferenceType; -} - -// @public -export type PipelineReferenceType = string; - -// @public -export type PipelineResource = AzureEntityResource & { - [property: string]: any; - description?: string; - activities?: ActivityUnion[]; - parameters?: { - [propertyName: string]: ParameterSpecification; - }; - variables?: { - [propertyName: string]: VariableSpecification; - }; - concurrency?: number; - annotations?: any[]; - runDimensions?: { - [propertyName: string]: any; - }; - folder?: PipelineFolder; -}; - -// @public -export interface PipelineRun { - [property: string]: any; - readonly durationInMs?: number; - readonly invokedBy?: PipelineRunInvokedBy; - readonly isLatest?: boolean; - readonly lastUpdated?: Date; - readonly message?: string; - readonly parameters?: { - [propertyName: string]: string; - }; - readonly pipelineName?: string; - readonly runEnd?: Date; - readonly runGroupId?: string; - readonly runId?: string; - readonly runStart?: Date; - readonly status?: string; -} - -// @public -export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { - isRecursive?: boolean; -} - -// @public -export type PipelineRunGetPipelineRunResponse = PipelineRun & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: PipelineRun; - }; -}; - -// @public -export interface PipelineRunInvokedBy { - readonly id?: string; - readonly invokedByType?: string; - readonly name?: string; -} - -// @public -export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { - 
_response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: ActivityRunsQueryResponse; - }; -}; - -// @public -export type PipelineRunQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: PipelineRunsQueryResponse; - }; -}; - -// @public -export interface PipelineRunsQueryResponse { - continuationToken?: string; - value: PipelineRun[]; -} - -// @public -export type PluginCurrentState = string; - -// @public -export interface PolybaseSettings { - [property: string]: any; - rejectSampleValue?: any; - rejectType?: PolybaseSettingsRejectType; - rejectValue?: any; - useTypeDefault?: any; -} - -// @public -export type PolybaseSettingsRejectType = string; - -// @public -export type PostgreSqlLinkedService = LinkedService & { - connectionString: any; - password?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type PostgreSqlSource = TabularSource & { - query?: any; -}; - -// @public -export type PostgreSqlTableDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export type PrestoAuthenticationType = string; - -// @public -export type PrestoLinkedService = LinkedService & { - host: any; - serverVersion: any; - catalog: any; - port?: any; - authenticationType: PrestoAuthenticationType; - username?: any; - password?: SecretBaseUnion; - enableSsl?: any; - trustedCertPath?: any; - useSystemTrustStore?: any; - allowHostNameCNMismatch?: any; - allowSelfSignedServerCert?: any; - timeZoneID?: any; - encryptedCredential?: any; -}; - -// @public -export type PrestoObjectDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export type PrestoSource = TabularSource & { - query?: any; -}; - -// @public -export interface PrivateEndpoint { - readonly id?: string; -} - -// @public -export type PrivateEndpointConnection = Resource & { - 
privateEndpoint?: PrivateEndpoint; - privateLinkServiceConnectionState?: PrivateLinkServiceConnectionState; - readonly provisioningState?: string; -}; - -// @public -export interface PrivateLinkServiceConnectionState { - readonly actionsRequired?: string; - description?: string; - status?: string; -} - -// @public -export type ProxyResource = Resource & {}; - -// @public -export interface PurviewConfiguration { - purviewResourceId?: string; -} - -// @public -export interface QueryDataFlowDebugSessionsResponse { - nextLink?: string; - value?: DataFlowDebugSessionInfo[]; -} - -// @public -export type QuickBooksLinkedService = LinkedService & { - endpoint: any; - companyId: any; - consumerKey: any; - consumerSecret: SecretBaseUnion; - accessToken: SecretBaseUnion; - accessTokenSecret: SecretBaseUnion; - useEncryptedEndpoints?: any; - encryptedCredential?: any; -}; - -// @public -export type QuickBooksObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type QuickBooksSource = TabularSource & { - query?: any; -}; - -// @public -export type RecurrenceFrequency = string; - -// @public -export interface RecurrenceSchedule { - [property: string]: any; - hours?: number[]; - minutes?: number[]; - monthDays?: number[]; - monthlyOccurrences?: RecurrenceScheduleOccurrence[]; - weekDays?: DayOfWeek[]; -} - -// @public -export interface RecurrenceScheduleOccurrence { - [property: string]: any; - day?: DayOfWeek; - occurrence?: number; -} - -// @public -export interface RedirectIncompatibleRowSettings { - [property: string]: any; - linkedServiceName: any; - path?: any; -} - -// @public -export interface RedshiftUnloadSettings { - bucketName: any; - s3LinkedServiceName: LinkedServiceReference; -} - -// @public -export type RelationalSource = CopySource & { - query?: any; -}; - -// @public -export type RelationalTableDataset = Dataset & { - tableName?: any; -}; - -// @public -export interface RerunTriggerListResponse { - readonly nextLink?: string; - value: 
RerunTriggerResource[]; -} - -// @public -export type RerunTriggerResource = AzureEntityResource & { - properties: RerunTumblingWindowTrigger; -}; - -// @public -export type RerunTumblingWindowTrigger = Trigger & { - parentTrigger?: any; - requestedStartTime: Date; - requestedEndTime: Date; - maxConcurrency: number; -}; - -// @public -export interface RerunTumblingWindowTriggerActionParameters { - endTime: Date; - maxConcurrency: number; - startTime: Date; -} - -// @public -export interface Resource { - readonly id?: string; - readonly name?: string; - readonly type?: string; -} - -// @public -export type ResourceIdentityType = "None" | "SystemAssigned"; - -// @public -export type ResponsysLinkedService = LinkedService & { - endpoint: any; - clientId: any; - clientSecret?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type ResponsysObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type ResponsysSource = TabularSource & { - query?: any; -}; - -// @public -export type RestResourceDataset = Dataset & { - relativeUrl?: any; - requestMethod?: any; - requestBody?: any; - additionalHeaders?: any; - paginationRules?: any; -}; - -// @public -export type RestServiceAuthenticationType = string; - -// @public -export type RestServiceLinkedService = LinkedService & { - url: any; - enableServerCertificateValidation?: any; - authenticationType: RestServiceAuthenticationType; - userName?: any; - password?: SecretBaseUnion; - servicePrincipalId?: any; - servicePrincipalKey?: SecretBaseUnion; - tenant?: any; - aadResourceId?: any; - encryptedCredential?: any; -}; - -// @public -export type RestSource = CopySource & { - requestMethod?: any; - requestBody?: any; - additionalHeaders?: any; - paginationRules?: any; - httpRequestTimeout?: any; - requestInterval?: any; -}; - -// @public -export interface RetryPolicy { - count?: any; - 
intervalInSeconds?: number; -} - -// @public -export interface RunFilterParameters { - continuationToken?: string; - filters?: RunQueryFilter[]; - lastUpdatedAfter: Date; - lastUpdatedBefore: Date; - orderBy?: RunQueryOrderBy[]; -} - -// @public -export interface RunQueryFilter { - operand: RunQueryFilterOperand; - operator: RunQueryFilterOperator; - values: string[]; -} - -// @public -export type RunQueryFilterOperand = string; - -// @public -export type RunQueryFilterOperator = string; - -// @public -export type RunQueryOrder = string; - -// @public -export interface RunQueryOrderBy { - order: RunQueryOrder; - orderBy: RunQueryOrderByField; -} - -// @public -export type RunQueryOrderByField = string; - -// @public -export type SalesforceLinkedService = LinkedService & { - environmentUrl?: any; - username?: any; - password?: SecretBaseUnion; - securityToken?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type SalesforceMarketingCloudLinkedService = LinkedService & { - clientId: any; - clientSecret?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type SalesforceMarketingCloudObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type SalesforceMarketingCloudSource = TabularSource & { - query?: any; -}; - -// @public -export type SalesforceObjectDataset = Dataset & { - objectApiName?: any; -}; - -// @public -export type SalesforceServiceCloudLinkedService = LinkedService & { - environmentUrl?: any; - username?: any; - password?: SecretBaseUnion; - securityToken?: SecretBaseUnion; - extendedProperties?: any; - encryptedCredential?: any; -}; - -// @public -export type SalesforceServiceCloudObjectDataset = Dataset & { - objectApiName?: any; -}; - -// @public -export type SalesforceServiceCloudSink = CopySink & { - writeBehavior?: SalesforceSinkWriteBehavior; - externalIdFieldName?: any; - ignoreNullValues?: any; 
-}; - -// @public -export type SalesforceServiceCloudSource = CopySource & { - query?: any; - readBehavior?: SalesforceSourceReadBehavior; -}; - -// @public -export type SalesforceSink = CopySink & { - writeBehavior?: SalesforceSinkWriteBehavior; - externalIdFieldName?: any; - ignoreNullValues?: any; -}; - -// @public -export type SalesforceSinkWriteBehavior = string; - -// @public -export type SalesforceSource = TabularSource & { - query?: any; - readBehavior?: SalesforceSourceReadBehavior; -}; - -// @public -export type SalesforceSourceReadBehavior = string; - -// @public -export type SapBwCubeDataset = Dataset & {}; - -// @public -export type SapBWLinkedService = LinkedService & { - server: any; - systemNumber: any; - clientId: any; - userName?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type SapBwSource = TabularSource & { - query?: any; -}; - -// @public -export type SapCloudForCustomerLinkedService = LinkedService & { - url: any; - username?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type SapCloudForCustomerResourceDataset = Dataset & { - path: any; -}; - -// @public -export type SapCloudForCustomerSink = CopySink & { - writeBehavior?: SapCloudForCustomerSinkWriteBehavior; -}; - -// @public -export type SapCloudForCustomerSinkWriteBehavior = string; - -// @public -export type SapCloudForCustomerSource = TabularSource & { - query?: any; -}; - -// @public -export type SapEccLinkedService = LinkedService & { - url: string; - username?: string; - password?: SecretBaseUnion; - encryptedCredential?: string; -}; - -// @public -export type SapEccResourceDataset = Dataset & { - path: any; -}; - -// @public -export type SapEccSource = TabularSource & { - query?: any; -}; - -// @public -export type SapHanaAuthenticationType = string; - -// @public -export type SapHanaLinkedService = LinkedService & { - connectionString?: any; - server: any; - authenticationType?: 
SapHanaAuthenticationType; - userName?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type SapHanaPartitionOption = string; - -// @public -export interface SapHanaPartitionSettings { - partitionColumnName?: any; -} - -// @public -export type SapHanaSource = TabularSource & { - query?: any; - packetSize?: any; - partitionOption?: SapHanaPartitionOption; - partitionSettings?: SapHanaPartitionSettings; -}; - -// @public -export type SapHanaTableDataset = Dataset & { - schemaTypePropertiesSchema?: any; - table?: any; -}; - -// @public -export type SapOpenHubLinkedService = LinkedService & { - server: any; - systemNumber: any; - clientId: any; - language?: any; - userName?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type SapOpenHubSource = TabularSource & { - excludeLastRequest?: any; - baseRequestId?: any; -}; - -// @public -export type SapOpenHubTableDataset = Dataset & { - openHubDestinationName: any; - excludeLastRequest?: any; - baseRequestId?: any; -}; - -// @public -export type SapTableLinkedService = LinkedService & { - server?: any; - systemNumber?: any; - clientId?: any; - language?: any; - systemId?: any; - userName?: any; - password?: SecretBaseUnion; - messageServer?: any; - messageServerService?: any; - sncMode?: any; - sncMyName?: any; - sncPartnerName?: any; - sncLibraryPath?: any; - sncQop?: any; - logonGroup?: any; - encryptedCredential?: any; -}; - -// @public -export type SapTablePartitionOption = string; - -// @public -export interface SapTablePartitionSettings { - maxPartitionsNumber?: any; - partitionColumnName?: any; - partitionLowerBound?: any; - partitionUpperBound?: any; -} - -// @public -export type SapTableResourceDataset = Dataset & { - tableName: any; -}; - -// @public -export type SapTableSource = TabularSource & { - rowCount?: any; - rowSkips?: any; - rfcTableFields?: any; - rfcTableOptions?: any; - batchSize?: any; - 
customRfcReadTableFunctionModule?: any; - partitionOption?: SapTablePartitionOption; - partitionSettings?: SapTablePartitionSettings; -}; - -// @public -export type SchedulerCurrentState = string; - -// @public -export type ScheduleTrigger = MultiplePipelineTrigger & { - recurrence: ScheduleTriggerRecurrence; -}; - -// @public -export interface ScheduleTriggerRecurrence { - [property: string]: any; - endTime?: Date; - frequency?: RecurrenceFrequency; - interval?: number; - schedule?: RecurrenceSchedule; - startTime?: Date; - timeZone?: string; -} - -// @public -export interface ScriptAction { - name: string; - parameters?: string; - roles: HdiNodeTypes; - uri: string; -} - -// @public -export interface SecretBase { - type: "SecureString" | "AzureKeyVaultSecret"; -} - -// @public (undocumented) -export type SecretBaseUnion = SecureString | AzureKeyVaultSecretReference; - -// @public -export type SecureString = SecretBase & { - value: string; -}; - -// @public -export type SelfDependencyTumblingWindowTriggerReference = DependencyReference & { - offset: string; - size?: string; -}; - -// @public -export type SelfHostedIntegrationRuntime = IntegrationRuntime & { - linkedInfo?: LinkedIntegrationRuntimeTypeUnion; -}; - -// @public -export type ServiceNowAuthenticationType = string; - -// @public -export type ServiceNowLinkedService = LinkedService & { - endpoint: any; - authenticationType: ServiceNowAuthenticationType; - username?: any; - password?: SecretBaseUnion; - clientId?: any; - clientSecret?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type ServiceNowObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type ServiceNowSource = TabularSource & { - query?: any; -}; - -// @public -export type SetVariableActivity = Activity & { - variableName?: string; - value?: any; -}; - -// @public -export type SftpAuthenticationType = string; - 
-// @public -export type SftpLocation = DatasetLocation & {}; - -// @public -export type SftpReadSettings = StoreReadSettings & { - recursive?: any; - wildcardFolderPath?: any; - wildcardFileName?: any; - modifiedDatetimeStart?: any; - modifiedDatetimeEnd?: any; -}; - -// @public -export type SftpServerLinkedService = LinkedService & { - host: any; - port?: any; - authenticationType?: SftpAuthenticationType; - userName?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; - privateKeyPath?: any; - privateKeyContent?: SecretBaseUnion; - passPhrase?: SecretBaseUnion; - skipHostKeyValidation?: any; - hostKeyFingerprint?: any; -}; - -// @public -export type SftpWriteSettings = StoreWriteSettings & { - operationTimeout?: any; -}; - -// @public -export type ShopifyLinkedService = LinkedService & { - host: any; - accessToken?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type ShopifyObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type ShopifySource = TabularSource & { - query?: any; -}; - -// @public -export interface Sku { - capacity?: number; - name?: string; - tier?: string; -} - -// @public -export type SparkAuthenticationType = string; - -// @public (undocumented) -export interface SparkBatchJob { - appId?: string | null; - appInfo?: { - [propertyName: string]: string; - } | null; - artifactId?: string; - errors?: SparkServiceError[]; - id: number; - jobType?: SparkJobType; - // (undocumented) - livyInfo?: SparkBatchJobState; - logLines?: string[] | null; - name?: string; - plugin?: SparkServicePlugin; - result?: SparkBatchJobResultType; - scheduler?: SparkScheduler; - sparkPoolName?: string; - state?: string; - submitterId?: string; - submitterName?: string; - tags?: { - [propertyName: string]: string; - }; - workspaceName?: string; -} - -// @public -export type SparkBatchJobResultType = string; - -// @public 
(undocumented) -export interface SparkBatchJobState { - currentState?: string; - deadAt?: Date | null; - // (undocumented) - jobCreationRequest?: SparkRequest; - notStartedAt?: Date | null; - recoveringAt?: Date | null; - runningAt?: Date | null; - startingAt?: Date | null; - successAt?: Date | null; - terminatedAt?: Date | null; -} - -// @public -export type SparkErrorSource = string; - -// @public -export interface SparkJobDefinition { - [property: string]: any; - description?: string; - jobProperties: SparkJobProperties; - language?: string; - requiredSparkVersion?: string; - targetBigDataPool: BigDataPoolReference; -} - -// @public -export interface SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams extends coreHttp.OperationOptions { - ifMatch?: string; -} - -// @public -export type SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkJobDefinitionResource; - }; -}; - -// @public -export type SparkJobDefinitionDebugSparkJobDefinitionResponse = SparkBatchJob & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkBatchJob; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export type SparkJobDefinitionExecuteSparkJobDefinitionResponse = SparkBatchJob & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkBatchJob; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export interface SparkJobDefinitionGetSparkJobDefinitionOptionalParams extends coreHttp.OperationOptions { - ifNoneMatch?: string; -} - -// @public -export type SparkJobDefinitionGetSparkJobDefinitionResponse = SparkJobDefinitionResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkJobDefinitionResource; - }; -}; - -// @public -export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse & { - _response: 
coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkJobDefinitionsListResponse; - }; -}; - -// @public -export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkJobDefinitionsListResponse; - }; -}; - -// @public -export type SparkJobDefinitionResource = AzureEntityResource & { - properties: SparkJobDefinition; -}; - -// @public -export interface SparkJobDefinitionsListResponse { - nextLink?: string; - value: SparkJobDefinitionResource[]; -} - -// @public -export interface SparkJobProperties { - [property: string]: any; - archives?: string[]; - args?: string[]; - className?: string; - conf?: any; - driverCores: number; - driverMemory: string; - executorCores: number; - executorMemory: string; - file: string; - files?: string[]; - jars?: string[]; - name?: string; - numExecutors: number; -} - -// @public -export type SparkJobReferenceType = string; - -// @public -export type SparkJobType = string; - -// @public -export type SparkLinkedService = LinkedService & { - host: any; - port: any; - serverType?: SparkServerType; - thriftTransportProtocol?: SparkThriftTransportProtocol; - authenticationType: SparkAuthenticationType; - username?: any; - password?: SecretBaseUnion; - httpPath?: any; - enableSsl?: any; - trustedCertPath?: any; - useSystemTrustStore?: any; - allowHostNameCNMismatch?: any; - allowSelfSignedServerCert?: any; - encryptedCredential?: any; -}; - -// @public -export type SparkObjectDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public (undocumented) -export interface SparkRequest { - // (undocumented) - archives?: string[]; - // (undocumented) - arguments?: string[]; - // (undocumented) - className?: string; - configuration?: { - [propertyName: string]: string; - }; - // (undocumented) - driverCores?: number; - // (undocumented) - driverMemory?: string; 
- // (undocumented) - executorCores?: number; - // (undocumented) - executorCount?: number; - // (undocumented) - executorMemory?: string; - // (undocumented) - file?: string; - // (undocumented) - files?: string[]; - // (undocumented) - jars?: string[]; - // (undocumented) - name?: string; - // (undocumented) - pythonFiles?: string[]; -} - -// @public (undocumented) -export interface SparkScheduler { - // (undocumented) - cancellationRequestedAt?: Date; - // (undocumented) - currentState?: SchedulerCurrentState; - // (undocumented) - endedAt?: Date | null; - // (undocumented) - scheduledAt?: Date | null; - // (undocumented) - submittedAt?: Date | null; -} - -// @public -export type SparkServerType = string; - -// @public (undocumented) -export interface SparkServiceError { - // (undocumented) - errorCode?: string; - // (undocumented) - message?: string; - // (undocumented) - source?: SparkErrorSource; -} - -// @public (undocumented) -export interface SparkServicePlugin { - // (undocumented) - cleanupStartedAt?: Date | null; - // (undocumented) - currentState?: PluginCurrentState; - // (undocumented) - monitoringStartedAt?: Date | null; - // (undocumented) - preparationStartedAt?: Date | null; - // (undocumented) - resourceAcquisitionStartedAt?: Date | null; - // (undocumented) - submissionStartedAt?: Date | null; -} - -// @public -export type SparkSource = TabularSource & { - query?: any; -}; - -// @public -export type SparkThriftTransportProtocol = string; - -// @public -export interface SqlConnection { - [property: string]: any; - name: string; - type: SqlConnectionType; -} - -// @public -export type SqlConnectionType = string; - -// @public -export type SqlDWSink = CopySink & { - preCopyScript?: any; - allowPolyBase?: any; - polyBaseSettings?: PolybaseSettings; - allowCopyCommand?: any; - copyCommandSettings?: DWCopyCommandSettings; - tableOption?: any; -}; - -// @public -export type SqlDWSource = TabularSource & { - sqlReaderQuery?: any; - 
sqlReaderStoredProcedureName?: any; - storedProcedureParameters?: any; -}; - -// @public -export type SqlMISink = CopySink & { - sqlWriterStoredProcedureName?: any; - sqlWriterTableType?: any; - preCopyScript?: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; - storedProcedureTableTypeParameterName?: any; - tableOption?: any; -}; - -// @public -export type SqlMISource = TabularSource & { - sqlReaderQuery?: any; - sqlReaderStoredProcedureName?: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; - produceAdditionalTypes?: any; -}; - -// @public -export type SqlPool = TrackedResource & { - sku?: Sku; - maxSizeBytes?: number; - collation?: string; - sourceDatabaseId?: string; - recoverableDatabaseId?: string; - provisioningState?: string; - status?: string; - restorePointInTime?: string; - createMode?: string; - creationDate?: Date; -}; - -// @public -export interface SqlPoolInfoListResult { - nextLink?: string; - value?: SqlPool[]; -} - -// @public -export interface SqlPoolReference { - referenceName: string; - type: SqlPoolReferenceType; -} - -// @public -export type SqlPoolReferenceType = string; - -// @public -export type SqlPoolsGetResponse = SqlPool & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SqlPool; - }; -}; - -// @public -export type SqlPoolsListResponse = SqlPoolInfoListResult & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SqlPoolInfoListResult; - }; -}; - -// @public -export type SqlPoolStoredProcedureActivity = Activity & { - sqlPool: SqlPoolReference; - storedProcedureName: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; -}; - -// @public -export interface SqlScript { - [property: string]: any; - content: SqlScriptContent; - description?: string; - type?: SqlScriptType; -} - -// @public -export interface SqlScriptContent { - [property: string]: any; - 
currentConnection: SqlConnection; - metadata?: SqlScriptMetadata; - query: string; -} - -// @public -export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { - ifMatch?: string; -} - -// @public -export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SqlScriptResource; - }; -}; - -// @public -export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { - ifNoneMatch?: string; -} - -// @public -export type SqlScriptGetSqlScriptResponse = SqlScriptResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SqlScriptResource; - }; -}; - -// @public -export type SqlScriptGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SqlScriptsListResponse; - }; -}; - -// @public -export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SqlScriptsListResponse; - }; -}; - -// @public -export interface SqlScriptMetadata { - [property: string]: any; - language?: string; -} - -// @public -export interface SqlScriptResource { - readonly etag?: string; - readonly id?: string; - name: string; - properties: SqlScript; - readonly type?: string; -} - -// @public -export interface SqlScriptsListResponse { - nextLink?: string; - value: SqlScriptResource[]; -} - -// @public -export type SqlScriptType = string; - -// @public -export type SqlServerLinkedService = LinkedService & { - connectionString: any; - userName?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type SqlServerSink = CopySink & { - sqlWriterStoredProcedureName?: any; - sqlWriterTableType?: any; - preCopyScript?: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; - 
storedProcedureTableTypeParameterName?: any; - tableOption?: any; -}; - -// @public -export type SqlServerSource = TabularSource & { - sqlReaderQuery?: any; - sqlReaderStoredProcedureName?: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; - produceAdditionalTypes?: any; -}; - -// @public -export type SqlServerStoredProcedureActivity = ExecutionActivity & { - storedProcedureName: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; -}; - -// @public -export type SqlServerTableDataset = Dataset & { - tableName?: any; - schemaTypePropertiesSchema?: any; - table?: any; -}; - -// @public -export type SqlSink = CopySink & { - sqlWriterStoredProcedureName?: any; - sqlWriterTableType?: any; - preCopyScript?: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; - storedProcedureTableTypeParameterName?: any; - tableOption?: any; -}; - -// @public -export type SqlSource = TabularSource & { - sqlReaderQuery?: any; - sqlReaderStoredProcedureName?: any; - storedProcedureParameters?: { - [propertyName: string]: StoredProcedureParameter; - }; -}; - -// @public -export type SquareLinkedService = LinkedService & { - host: any; - clientId: any; - clientSecret?: SecretBaseUnion; - redirectUri: any; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type SquareObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type SquareSource = TabularSource & { - query?: any; -}; - -// @public -export interface SsisAccessCredential { - domain: any; - password: SecretBaseUnion; - userName: any; -} - -// @public -export interface SsisChildPackage { - packageContent: any; - packageLastModifiedDate?: string; - packageName?: string; - packagePath: any; -} - -// @public -export interface SsisExecutionCredential { - domain: any; - password: SecureString; - userName: 
any; -} - -// @public -export interface SsisExecutionParameter { - value: any; -} - -// @public -export interface SsisLogLocation { - accessCredential?: SsisAccessCredential; - logPath: any; - logRefreshInterval?: any; - type: SsisLogLocationType; -} - -// @public -export type SsisLogLocationType = string; - -// @public -export interface SsisObjectMetadataStatusResponse { - error?: string; - name?: string; - properties?: string; - status?: string; -} - -// @public -export interface SsisPackageLocation { - accessCredential?: SsisAccessCredential; - childPackages?: SsisChildPackage[]; - configurationPath?: any; - packageContent?: any; - packageLastModifiedDate?: string; - packageName?: string; - packagePassword?: SecretBaseUnion; - packagePath?: any; - type?: SsisPackageLocationType; -} - -// @public -export type SsisPackageLocationType = string; - -// @public -export interface SsisPropertyOverride { - isSensitive?: boolean; - value: any; -} - -// @public -export interface StagingSettings { - [property: string]: any; - enableCompression?: any; - linkedServiceName: LinkedServiceReference; - path?: any; -} - -// @public -export interface StartDataFlowDebugSessionRequest { - dataFlow?: DataFlowResource; - datasets?: DatasetResource[]; - debugSettings?: any; - incrementalDebug?: boolean; - linkedServices?: LinkedServiceResource[]; - sessionId?: string; - staging?: any; -} - -// @public -export interface StartDataFlowDebugSessionResponse { - jobVersion?: string; -} - -// @public -export interface StoredProcedureParameter { - type?: StoredProcedureParameterType; - value?: any; -} - -// @public -export type StoredProcedureParameterType = string; - -// @public -export interface StoreReadSettings { - [property: string]: any; - maxConcurrentConnections?: any; - type: "AzureBlobStorageReadSettings" | "AzureBlobFSReadSettings" | "AzureDataLakeStoreReadSettings" | "AmazonS3ReadSettings" | "FileServerReadSettings" | "AzureFileStorageReadSettings" | "GoogleCloudStorageReadSettings" 
| "FtpReadSettings" | "SftpReadSettings" | "HttpReadSettings" | "HdfsReadSettings"; -} - -// @public (undocumented) -export type StoreReadSettingsUnion = AzureBlobStorageReadSettings | AzureBlobFSReadSettings | AzureDataLakeStoreReadSettings | AmazonS3ReadSettings | FileServerReadSettings | AzureFileStorageReadSettings | GoogleCloudStorageReadSettings | FtpReadSettings | SftpReadSettings | HttpReadSettings | HdfsReadSettings; - -// @public -export interface StoreWriteSettings { - [property: string]: any; - copyBehavior?: any; - maxConcurrentConnections?: any; - type: "SftpWriteSettings" | "AzureBlobStorageWriteSettings" | "AzureBlobFSWriteSettings" | "AzureDataLakeStoreWriteSettings" | "FileServerWriteSettings"; -} - -// @public (undocumented) -export type StoreWriteSettingsUnion = SftpWriteSettings | AzureBlobStorageWriteSettings | AzureBlobFSWriteSettings | AzureDataLakeStoreWriteSettings | FileServerWriteSettings; - -// @public -export type SubResource = AzureEntityResource & {}; - -// @public -export interface SubResourceDebugResource { - name?: string; -} - -// @public -export type SwitchActivity = Activity & { - on: Expression; - cases?: SwitchCase[]; - defaultActivities?: ActivityUnion[]; -}; - -// @public -export interface SwitchCase { - activities?: ActivityUnion[]; - value?: string; -} - -// @public -export type SybaseAuthenticationType = string; - -// @public -export type SybaseLinkedService = LinkedService & { - server: any; - database: any; - schema?: any; - authenticationType?: SybaseAuthenticationType; - username?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type SybaseSource = TabularSource & { - query?: any; -}; - -// @public -export type SybaseTableDataset = Dataset & { - tableName?: any; -}; - -// @public -export type SynapseNotebookActivity = ExecutionActivity & { - notebook: SynapseNotebookReference; - parameters?: { - [propertyName: string]: any; - }; -}; - -// @public -export interface 
SynapseNotebookReference { - referenceName: string; - type: NotebookReferenceType; -} - -// @public -export type SynapseSparkJobDefinitionActivity = ExecutionActivity & { - sparkJob: SynapseSparkJobReference; -}; - -// @public -export interface SynapseSparkJobReference { - referenceName: string; - type: SparkJobReferenceType; -} - -// @public -export type TabularSource = CopySource & { - queryTimeout?: any; -}; - -// @public (undocumented) -export type TabularSourceUnion = AzureTableSource | InformixSource | Db2Source | OdbcSource | MySqlSource | PostgreSqlSource | SybaseSource | SapBwSource | SalesforceSource | SapCloudForCustomerSource | SapEccSource | SapHanaSource | SapOpenHubSource | SapTableSource | SqlSource | SqlServerSource | AzureSqlSource | SqlMISource | SqlDWSource | AzureMySqlSource | TeradataSource | CassandraSource | AmazonMWSSource | AzurePostgreSqlSource | ConcurSource | CouchbaseSource | DrillSource | EloquaSource | GoogleBigQuerySource | GreenplumSource | HBaseSource | HiveSource | HubspotSource | ImpalaSource | JiraSource | MagentoSource | MariaDBSource | AzureMariaDBSource | MarketoSource | PaypalSource | PhoenixSource | PrestoSource | QuickBooksSource | ServiceNowSource | ShopifySource | SparkSource | SquareSource | XeroSource | ZohoSource | NetezzaSource | VerticaSource | SalesforceMarketingCloudSource | ResponsysSource | DynamicsAXSource | OracleServiceCloudSource | GoogleAdWordsSource | AmazonRedshiftSource; - -// @public -export type TabularTranslator = CopyTranslator & { - columnMappings?: any; - schemaMapping?: any; - collectionReference?: any; - mapComplexValuesToString?: any; - mappings?: any; -}; - -// @public -export type TeradataAuthenticationType = string; - -// @public -export type TeradataLinkedService = LinkedService & { - connectionString?: any; - server?: any; - authenticationType?: TeradataAuthenticationType; - username?: any; - password?: SecretBaseUnion; - encryptedCredential?: any; -}; - -// @public -export type 
TeradataPartitionOption = string; - -// @public -export interface TeradataPartitionSettings { - partitionColumnName?: any; - partitionLowerBound?: any; - partitionUpperBound?: any; -} - -// @public -export type TeradataSource = TabularSource & { - query?: any; - partitionOption?: TeradataPartitionOption; - partitionSettings?: TeradataPartitionSettings; -}; - -// @public -export type TeradataTableDataset = Dataset & { - database?: any; - table?: any; -}; - -// @public -export type TextFormat = DatasetStorageFormat & { - columnDelimiter?: any; - rowDelimiter?: any; - escapeChar?: any; - quoteChar?: any; - nullValue?: any; - encodingName?: any; - treatEmptyAsNull?: any; - skipLineCount?: any; - firstRowAsHeader?: any; -}; - -// @public -export type TrackedResource = Resource & { - tags?: { - [propertyName: string]: string; - }; - location: string; -}; - -// @public -export interface Transformation { - description?: string; - name: string; -} - -// @public -export interface Trigger { - [property: string]: any; - annotations?: any[]; - description?: string; - readonly runtimeState?: TriggerRuntimeState; - type: "RerunTumblingWindowTrigger" | "MultiplePipelineTrigger" | "ScheduleTrigger" | "BlobTrigger" | "BlobEventsTrigger" | "TumblingWindowTrigger" | "ChainingTrigger"; -} - -// @public -export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { - ifMatch?: string; -} - -// @public -export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: TriggerResource; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export interface TriggerDependencyProvisioningStatus { - provisioningStatus: string; - triggerName: string; -} - -// @public -export type TriggerDependencyReference = DependencyReference & { - referenceTrigger: TriggerReference; -}; - -// @public (undocumented) -export type TriggerDependencyReferenceUnion = 
TumblingWindowTriggerDependencyReference; - -// @public -export type TriggerGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: TriggerSubscriptionOperationStatus; - }; -}; - -// @public -export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { - ifNoneMatch?: string; -} - -// @public -export type TriggerGetTriggerResponse = TriggerResource & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: TriggerResource; - }; -}; - -// @public -export type TriggerGetTriggersByWorkspaceNextResponse = TriggerListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: TriggerListResponse; - }; -}; - -// @public -export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: TriggerListResponse; - }; -}; - -// @public -export interface TriggerListResponse { - nextLink?: string; - value: TriggerResource[]; -} - -// @public -export interface TriggerPipelineReference { - parameters?: { - [propertyName: string]: any; - }; - pipelineReference?: PipelineReference; -} - -// @public -export interface TriggerReference { - referenceName: string; - type: TriggerReferenceType; -} - -// @public -export type TriggerReferenceType = string; - -// @public -export type TriggerResource = AzureEntityResource & { - properties: TriggerUnion; -}; - -// @public -export interface TriggerRun { - [property: string]: any; - readonly message?: string; - readonly properties?: { - [propertyName: string]: string; - }; - readonly status?: TriggerRunStatus; - readonly triggeredPipelines?: { - [propertyName: string]: string; - }; - readonly triggerName?: string; - readonly triggerRunId?: string; - readonly triggerRunTimestamp?: Date; - readonly triggerType?: string; -} - -// @public -export type TriggerRunQueryTriggerRunsByWorkspaceResponse = 
TriggerRunsQueryResponse & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: TriggerRunsQueryResponse; - }; -}; - -// @public -export interface TriggerRunsQueryResponse { - continuationToken?: string; - value: TriggerRun[]; -} - -// @public -export type TriggerRunStatus = string; - -// @public -export type TriggerRuntimeState = string; - -// @public -export type TriggerSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: TriggerSubscriptionOperationStatus; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export interface TriggerSubscriptionOperationStatus { - readonly status?: EventSubscriptionStatus; - readonly triggerName?: string; -} - -// @public (undocumented) -export type TriggerUnion = RerunTumblingWindowTrigger | MultiplePipelineTriggerUnion | TumblingWindowTrigger | ChainingTrigger; - -// @public -export type TriggerUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: TriggerSubscriptionOperationStatus; - [LROSYM]: LROResponseInfo; - }; -}; - -// @public -export type TumblingWindowFrequency = string; - -// @public -export type TumblingWindowTrigger = Trigger & { - pipeline: TriggerPipelineReference; - frequency: TumblingWindowFrequency; - interval: number; - startTime: Date; - endTime?: Date; - delay?: any; - maxConcurrency: number; - retryPolicy?: RetryPolicy; - dependsOn?: DependencyReferenceUnion[]; -}; - -// @public -export type TumblingWindowTriggerDependencyReference = TriggerDependencyReference & { - offset?: string; - size?: string; -}; - -// @public -export type Type = string; - -// @public -export type UntilActivity = Activity & { - expression: Expression; - timeout?: any; - activities: ActivityUnion[]; -}; - -// @public -export interface UserProperty { - name: string; - value: any; -} - -// @public -export type 
ValidationActivity = Activity & { - timeout?: any; - sleep?: any; - minimumSize?: any; - childItems?: any; - dataset: DatasetReference; -}; - -// @public -export interface VariableSpecification { - defaultValue?: any; - type: VariableType; -} - -// @public -export type VariableType = string; - -// @public -export type VerticaLinkedService = LinkedService & { - connectionString?: any; - pwd?: AzureKeyVaultSecretReference; - encryptedCredential?: any; -}; - -// @public -export type VerticaSource = TabularSource & { - query?: any; -}; - -// @public -export type VerticaTableDataset = Dataset & { - tableName?: any; - table?: any; - schemaTypePropertiesSchema?: any; -}; - -// @public -export interface VirtualNetworkProfile { - computeSubnetId?: string; -} - -// @public -export type WaitActivity = Activity & { - waitTimeInSeconds: number; -}; - -// @public -export type WebActivity = ExecutionActivity & { - method: WebActivityMethod; - url: any; - headers?: any; - body?: any; - authentication?: WebActivityAuthentication; - datasets?: DatasetReference[]; - linkedServices?: LinkedServiceReference[]; - connectVia?: IntegrationRuntimeReference; -}; - -// @public -export interface WebActivityAuthentication { - password?: SecretBaseUnion; - pfx?: SecretBaseUnion; - resource?: string; - type: string; - username?: string; -} - -// @public -export type WebActivityMethod = string; - -// @public -export type WebAnonymousAuthentication = WebLinkedServiceTypeProperties & {}; - -// @public -export type WebAuthenticationType = string; - -// @public -export type WebBasicAuthentication = WebLinkedServiceTypeProperties & { - username: any; - password: SecretBaseUnion; -}; - -// @public -export type WebClientCertificateAuthentication = WebLinkedServiceTypeProperties & { - pfx: SecretBaseUnion; - password: SecretBaseUnion; -}; - -// @public -export type WebHookActivity = Activity & { - method: WebHookActivityMethod; - url: any; - timeout?: string; - headers?: any; - body?: any; - 
authentication?: WebActivityAuthentication; - reportStatusOnCallBack?: any; -}; - -// @public -export type WebHookActivityMethod = string; - -// @public -export type WebLinkedService = LinkedService & { - typeProperties: WebLinkedServiceTypePropertiesUnion; -}; - -// @public -export interface WebLinkedServiceTypeProperties { - authenticationType: "Anonymous" | "Basic" | "ClientCertificate"; - url: any; -} - -// @public (undocumented) -export type WebLinkedServiceTypePropertiesUnion = WebAnonymousAuthentication | WebBasicAuthentication | WebClientCertificateAuthentication; - -// @public -export type WebSource = CopySource & {}; - -// @public -export type WebTableDataset = Dataset & { - index: any; - path?: any; -}; - -// @public -export type Workspace = TrackedResource & { - identity?: ManagedIdentity; - defaultDataLakeStorage?: DataLakeStorageAccountDetails; - sqlAdministratorLoginPassword?: string; - managedResourceGroupName?: string; - readonly provisioningState?: string; - sqlAdministratorLogin?: string; - virtualNetworkProfile?: VirtualNetworkProfile; - connectivityEndpoints?: { - [propertyName: string]: string; - }; - managedVirtualNetwork?: string; - privateEndpointConnections?: PrivateEndpointConnection[]; - encryption?: EncryptionDetails; - readonly workspaceUID?: string; - readonly extraProperties?: { - [propertyName: string]: any; - }; - managedVirtualNetworkSettings?: ManagedVirtualNetworkSettings; - workspaceRepositoryConfiguration?: WorkspaceRepositoryConfiguration; - purviewConfiguration?: PurviewConfiguration; -}; - -// @public -export type WorkspaceGetResponse = Workspace & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: Workspace; - }; -}; - -// @public -export interface WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams extends coreHttp.OperationOptions { - clientRequestId?: string; -} - -// @public -export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAccessTokenResponse & { - 
_response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: GitHubAccessTokenResponse; - }; -}; - -// @public -export interface WorkspaceIdentity { - readonly principalId?: string; - readonly tenantId?: string; - type: "SystemAssigned"; -} - -// @public -export interface WorkspaceKeyDetails { - keyVaultUrl?: string; - name?: string; -} - -// @public -export interface WorkspaceRepositoryConfiguration { - accountName?: string; - collaborationBranch?: string; - hostName?: string; - projectName?: string; - repositoryName?: string; - rootFolder?: string; - type?: string; -} - -// @public -export interface WorkspaceUpdateParameters { - identity?: WorkspaceIdentity; - tags?: { - [propertyName: string]: string; - }; -} - -// @public -export type XeroLinkedService = LinkedService & { - host: any; - consumerKey?: SecretBaseUnion; - privateKey?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type XeroObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type XeroSource = TabularSource & { - query?: any; -}; - -// @public -export type ZohoLinkedService = LinkedService & { - endpoint: any; - accessToken?: SecretBaseUnion; - useEncryptedEndpoints?: any; - useHostVerification?: any; - usePeerVerification?: any; - encryptedCredential?: any; -}; - -// @public -export type ZohoObjectDataset = Dataset & { - tableName?: any; -}; - -// @public -export type ZohoSource = TabularSource & { - query?: any; -}; - - -// Warnings were encountered during analysis: -// -// src/models/index.ts:15186:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts - -// (No @packageDocumentation comment for this package) - -``` diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts index 3c6f3a11f2e1..8dfd8150282a 100644 --- 
a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts @@ -38,15 +38,39 @@ export class ArtifactsClientContext extends coreHttp.ServiceClient { options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; } - const defaultPipelines = Array.isArray(options.requestPolicyFactories) - ? options.requestPolicyFactories - : (coreHttp.createPipelineFromOptions(options) - .requestPolicyFactories as coreHttp.RequestPolicyFactory[]); - - options = { - ...options, - requestPolicyFactories: [lroPolicy(), ...defaultPipelines] - }; + if (!options.credentialScopes) { + options.credentialScopes = ["https://microsoft.com"]; + } + + // Building the request policy fatories based on the passed factories and the + // any required factories needed by the client. + if (Array.isArray(options.requestPolicyFactories)) { + // When an array of factories is passed in, we'll just add the required factories, + // in this case lroPolicy(). It is important to note that passing an array of factories + // to a new client, bypasses core-http default factories. Just the pipelines provided will be run. + options.requestPolicyFactories = [ + lroPolicy(), + ...options.requestPolicyFactories + ]; + } else if (options.requestPolicyFactories) { + // When we were passed a requestPolicyFactories as a function, we'll create a new one that adds the factories provided + // in the options plus the required policies. 
When using this path, the pipelines passed to the client will be added to the + // default policies added by core-http + const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [ + lroPolicy() + ]; + options.requestPolicyFactories = (defaultFactories) => [ + ...optionsPolicies, + ...defaultFactories + ]; + } else { + // In case no request policy factories were provided, we'll just need to create a function that will add + // the lroPolicy to the default pipelines added by core-http + options.requestPolicyFactories = (defaultFactories) => [ + lroPolicy(), + ...defaultFactories + ]; + } super(credentials, options); diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts index 3c2fd2e5d6c9..0b5d4232590f 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -6,7 +6,11 @@ import { FinalStateVia, LROSYM } from "./models"; -import { OperationSpec, OperationArguments, OperationResponse } from "@azure/core-http"; +import { + OperationSpec, + OperationArguments, + OperationResponse +} from "@azure/core-http"; import { terminalStates } from "./constants"; import { SendOperationFn } from "."; @@ -17,11 +21,14 @@ export function createAzureAsyncOperationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); + throw new Error( + "Expected lroData to be defined for Azure-AsyncOperation strategy" + ); } let currentOperation = initialOperation; - let lastKnownPollingUrl = lroData.azureAsyncOperation || lroData.operationLocation; + let lastKnownPollingUrl = + lroData.azureAsyncOperation || lroData.operationLocation; return { isTerminal: () => { @@ -52,12 +59,17 @@ export function createAzureAsyncOperationStrategy( const 
initialOperationResult = initialOperation.result._response[LROSYM]; const currentOperationResult = currentOperation.result._response[LROSYM]; - if (!shouldPerformFinalGet(initialOperationResult, currentOperationResult)) { + if ( + !shouldPerformFinalGet(initialOperationResult, currentOperationResult) + ) { return currentOperation; } if (initialOperationResult?.requestMethod === "PUT") { - currentOperation = await sendFinalGet(initialOperation, sendOperationFn); + currentOperation = await sendFinalGet( + initialOperation, + sendOperationFn + ); return currentOperation; } @@ -65,20 +77,29 @@ export function createAzureAsyncOperationStrategy( if (initialOperationResult?.location) { switch (finalStateVia) { case "original-uri": - currentOperation = await sendFinalGet(initialOperation, sendOperationFn); + currentOperation = await sendFinalGet( + initialOperation, + sendOperationFn + ); return currentOperation; case "azure-async-operation": return currentOperation; case "location": default: - const location = initialOperationResult.location || currentOperationResult?.location; + const location = + initialOperationResult.location || + currentOperationResult?.location; if (!location) { throw new Error("Couldn't determine final GET URL from location"); } - return await sendFinalGet(initialOperation, sendOperationFn, location); + return await sendFinalGet( + initialOperation, + sendOperationFn, + location + ); } } @@ -156,7 +177,10 @@ function getCompositeMappers(responses: { }, {} as { [responseCode: string]: OperationResponse }); } -function shouldPerformFinalGet(initialResult?: LROResponseInfo, currentResult?: LROResponseInfo) { +function shouldPerformFinalGet( + initialResult?: LROResponseInfo, + currentResult?: LROResponseInfo +) { const { status } = currentResult || {}; const { requestMethod: initialRequestMethod, location } = initialResult || {}; if (status && status.toLowerCase() !== "succeeded") { diff --git 
a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts index 49333c25b430..62ed188e691e 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -44,7 +44,10 @@ export function createBodyPollingStrategy( }; // Execute the polling operation - initialOperation.result = await sendOperation(initialOperation.args, pollingSpec); + initialOperation.result = await sendOperation( + initialOperation.args, + pollingSpec + ); return initialOperation; } }; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts index 008f60177503..cfcfa8efd0a7 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -8,7 +8,9 @@ export function createLocationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); + throw new Error( + "Expected lroData to be defined for Azure-AsyncOperation strategy" + ); } let currentOperation = initialOperation; @@ -49,7 +51,8 @@ export function createLocationStrategy( const result = await sendOperationFn(pollingArgs, pollingSpec); // Update latest polling url - lastKnownPollingUrl = result._response[LROSYM]?.location || lastKnownPollingUrl; + lastKnownPollingUrl = + result._response[LROSYM]?.location || lastKnownPollingUrl; // Update lastOperation result currentOperation = { diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index 1e1ec61db3b3..e686401a5cf9 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -21,8 +21,12 @@ class LROPolicy extends BaseRequestPolicy { super(nextPolicy, 
options); } - public async sendRequest(webResource: WebResource): Promise { - let result: LROOperationResponse = await this._nextPolicy.sendRequest(webResource); + public async sendRequest( + webResource: WebResource + ): Promise { + let result: LROOperationResponse = await this._nextPolicy.sendRequest( + webResource + ); const _lroData = getLROData(result); result[LROSYM] = _lroData; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts index 9ab5f25fb49a..72d979bd46a9 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -1,6 +1,17 @@ import { Poller } from "@azure/core-lro"; -import { OperationSpec, OperationArguments, delay, RestError } from "@azure/core-http"; -import { BaseResult, LROOperationState, LROOperationStep, FinalStateVia, LROSYM } from "./models"; +import { + OperationSpec, + OperationArguments, + delay, + RestError +} from "@azure/core-http"; +import { + BaseResult, + LROOperationState, + LROOperationStep, + FinalStateVia, + LROSYM +} from "./models"; import { makeOperation } from "./operation"; import { createBodyPollingStrategy } from "./bodyPollingStrategy"; import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; @@ -59,7 +70,11 @@ export class LROPoller extends Poller< result: initialOperationResult }; - const pollingStrategy = getPollingStrategy(initialOperation, sendOperation, finalStateVia); + const pollingStrategy = getPollingStrategy( + initialOperation, + sendOperation, + finalStateVia + ); const state: LROOperationState = { // Initial operation will become the last operation @@ -112,7 +127,11 @@ function getPollingStrategy( } if (lroData.azureAsyncOperation || lroData.operationLocation) { - return createAzureAsyncOperationStrategy(initialOperation, sendOperationFn, finalStateVia); + return createAzureAsyncOperationStrategy( + initialOperation, + sendOperationFn, + finalStateVia + 
); } if (lroData.location) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts index 704c731cf370..ed0dd9132876 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/models.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -8,7 +8,10 @@ import { import { PollOperationState, PollOperation } from "@azure/core-lro"; export const LROSYM = Symbol("LROData"); -export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; +export type FinalStateVia = + | "azure-async-operation" + | "location" + | "original-uri"; export interface LROResponseInfo { requestMethod: HttpMethods; @@ -44,7 +47,8 @@ export interface LROOperationStep { result: TResult; } -export interface LROOperationState extends PollOperationState { +export interface LROOperationState + extends PollOperationState { lastOperation: LROOperationStep; initialOperation: LROOperationStep; pollingStrategy: LROStrategy; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts index e1e3fa2e1126..9b37277e7b2a 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/operation.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -41,7 +41,9 @@ async function update( const currentLroData = currentResponse.result._response[LROSYM]; if (!currentLroData) { - throw new Error("Expected lroData to be defined for updating LRO operation"); + throw new Error( + "Expected lroData to be defined for updating LRO operation" + ); } if (state.result) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts index 3c518804edec..e3289b95905b 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -21,10 +21,17 @@ export function shouldDeserializeLRO(finalStateVia?: string) { isInitialRequest = false; } - if (initialOperationInfo.azureAsyncOperation || 
initialOperationInfo.operationLocation) { + if ( + initialOperationInfo.azureAsyncOperation || + initialOperationInfo.operationLocation + ) { return ( !isInitialRequest && - isAsyncOperationFinalResponse(response, initialOperationInfo, finalStateVia) + isAsyncOperationFinalResponse( + response, + initialOperationInfo, + finalStateVia + ) ); } @@ -62,7 +69,10 @@ function isAsyncOperationFinalResponse( return true; } - if (initialOperationInfo.requestMethod !== "PUT" && !initialOperationInfo.location) { + if ( + initialOperationInfo.requestMethod !== "PUT" && + !initialOperationInfo.location + ) { return true; } diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index 274f837efafd..4bcfe16ee83c 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -199,7 +199,9 @@ export type TriggerUnion = | TumblingWindowTrigger | ChainingTrigger; export type DataFlowUnion = MappingDataFlow; -export type IntegrationRuntimeUnion = ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; +export type IntegrationRuntimeUnion = + | ManagedIntegrationRuntime + | SelfHostedIntegrationRuntime; export type SecretBaseUnion = SecureString | AzureKeyVaultSecretReference; export type DatasetLocationUnion = | AzureBlobStorageLocation @@ -350,7 +352,10 @@ export type ExecutionActivityUnion = | ExecuteDataFlowActivity | SynapseNotebookActivity | SynapseSparkJobDefinitionActivity; -export type MultiplePipelineTriggerUnion = ScheduleTrigger | BlobTrigger | BlobEventsTrigger; +export type MultiplePipelineTriggerUnion = + | ScheduleTrigger + | BlobTrigger + | BlobEventsTrigger; export type TabularSourceUnion = | AzureTableSource | InformixSource @@ -3129,7 +3134,12 @@ export interface DatasetStorageFormat { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | 
"ParquetFormat"; + type: + | "TextFormat" + | "JsonFormat" + | "AvroFormat" + | "OrcFormat" + | "ParquetFormat"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -3319,7 +3329,10 @@ export interface FormatWriteSettings { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; + type: + | "AvroWriteSettings" + | "DelimitedTextWriteSettings" + | "JsonWriteSettings"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -14594,7 +14607,8 @@ export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceReso /** * Optional parameters. */ -export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { +export interface LinkedServiceGetLinkedServiceOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14664,7 +14678,8 @@ export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { /** * Optional parameters. */ -export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { +export interface DatasetCreateOrUpdateDatasetOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -14698,7 +14713,8 @@ export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { /** * Optional parameters. */ -export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { +export interface DatasetGetDatasetOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14768,7 +14784,8 @@ export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { /** * Optional parameters. */ -export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { +export interface PipelineCreateOrUpdatePipelineOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -14802,7 +14819,8 @@ export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { +export interface PipelineGetPipelineOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14832,7 +14850,8 @@ export type PipelineGetPipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { +export interface PipelineCreatePipelineRunOptionalParams + extends coreHttp.OperationOptions { /** * Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */ @@ -14954,7 +14973,8 @@ export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { /** * Optional parameters. */ -export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { +export interface PipelineRunCancelPipelineRunOptionalParams + extends coreHttp.OperationOptions { /** * If true, cancel all the Child pipelines that are triggered by the current pipeline. 
*/ @@ -14984,7 +15004,8 @@ export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { /** * Optional parameters. */ -export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { +export interface TriggerCreateOrUpdateTriggerOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15018,7 +15039,8 @@ export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { /** * Optional parameters. */ -export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { +export interface TriggerGetTriggerOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15156,7 +15178,8 @@ export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResp /** * Optional parameters. */ -export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { +export interface DataFlowCreateOrUpdateDataFlowOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15190,7 +15213,8 @@ export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { /** * Optional parameters. */ -export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { +export interface DataFlowGetDataFlowOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
*/ @@ -15388,7 +15412,8 @@ export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & /** * Optional parameters. */ -export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15418,7 +15443,8 @@ export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { /** * Optional parameters. */ -export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { +export interface SqlScriptGetSqlScriptOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15658,7 +15684,8 @@ export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse /** * Optional parameters. */ -export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { +export interface NotebookCreateOrUpdateNotebookOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the Note book entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15692,7 +15719,8 @@ export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { /** * Optional parameters. */ -export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { +export interface NotebookGetNotebookOptionalParams + extends coreHttp.OperationOptions { /** * ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. 
*/ @@ -15933,7 +15961,8 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces /** * Optional parameters. */ -export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface ArtifactsClientOptionalParams + extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index 0b924baee621..424331700c33 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -14753,7 +14753,8 @@ export const TextFormat: coreHttp.CompositeMapper = { className: "TextFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, columnDelimiter: { @@ -14821,7 +14822,8 @@ export const JsonFormat: coreHttp.CompositeMapper = { className: "JsonFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, filePattern: { @@ -14865,7 +14867,8 @@ export const AvroFormat: coreHttp.CompositeMapper = { className: "AvroFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14879,7 +14882,8 @@ export const OrcFormat: coreHttp.CompositeMapper = { className: 
"OrcFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14893,7 +14897,8 @@ export const ParquetFormat: coreHttp.CompositeMapper = { className: "ParquetFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: + DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14980,7 +14985,8 @@ export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebAnonymousAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties } @@ -14993,7 +14999,8 @@ export const WebBasicAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebBasicAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, username: { @@ -15020,7 +15027,8 @@ export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebClientCertificateAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + 
polymorphicDiscriminator: + WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, pfx: { @@ -17634,7 +17642,9 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap ...DependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -17646,7 +17656,9 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap }, size: { constraints: { - Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -17665,7 +17677,8 @@ export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeKeyAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: + LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, key: { @@ -17685,7 +17698,8 @@ export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeRbacAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: + LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, resourceId: { @@ -21040,7 +21054,9 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper ...TriggerDependencyReference.type.modelProperties, offset: { constraints: 
{ - Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, @@ -21051,7 +21067,9 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper }, size: { constraints: { - Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + Pattern: new RegExp( + "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" + ), MaxLength: 15, MinLength: 8 }, diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index 3eab86fcd719..977767e2578f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -26,9 +26,10 @@ export class BigDataPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< - BigDataPoolsListResponse - >; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; } /** @@ -44,9 +45,10 @@ export class BigDataPools { bigDataPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< - BigDataPoolsGetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index 010e6f89c007..3dffca96470b 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -57,7 +57,10 @@ export class DataFlow { yield result.value || []; let 
continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDataFlowsByWorkspaceNext(continuationToken, options); + result = await this._getDataFlowsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -87,7 +90,10 @@ export class DataFlow { dataFlow, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowCreateOrUpdateDataFlowResponse >; @@ -137,8 +143,13 @@ export class DataFlow { dataFlowName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -169,8 +180,13 @@ export class DataFlow { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -260,7 +276,11 @@ const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataFlow, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, 
Parameters.dataFlowName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index f84fba997bfa..f0e17e80dafd 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -59,7 +59,10 @@ export class DataFlowDebugSession { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._queryDataFlowDebugSessionsByWorkspaceNext(continuationToken, options); + result = await this._queryDataFlowDebugSessionsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -68,7 +71,9 @@ export class DataFlowDebugSession { private async *queryDataFlowDebugSessionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage(options)) { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage( + options + )) { yield* page; } } @@ -81,12 +86,17 @@ export class DataFlowDebugSession { async createDataFlowDebugSession( request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions - ): Promise> { + ): Promise< + LROPoller + > { const operationArguments: coreHttp.OperationArguments = { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< 
DataFlowDebugSessionCreateDataFlowDebugSessionResponse >; @@ -110,14 +120,18 @@ export class DataFlowDebugSession { */ private _queryDataFlowDebugSessionsByWorkspace( options?: coreHttp.OperationOptions - ): Promise { + ): Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse + > { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest( operationArguments, queryDataFlowDebugSessionsByWorkspaceOperationSpec - ) as Promise; + ) as Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse + >; } /** @@ -171,7 +185,10 @@ export class DataFlowDebugSession { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowDebugSessionExecuteCommandResponse >; @@ -198,7 +215,9 @@ export class DataFlowDebugSession { private _queryDataFlowDebugSessionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions - ): Promise { + ): Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse + > { const operationArguments: coreHttp.OperationArguments = { nextLink, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -206,7 +225,9 @@ export class DataFlowDebugSession { return this.client.sendOperationRequest( operationArguments, queryDataFlowDebugSessionsByWorkspaceNextOperationSpec - ) as Promise; + ) as Promise< + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse + >; } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 294e0e457bd6..0cef4925e7c0 100644 --- 
a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -57,7 +57,10 @@ export class Dataset { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDatasetsByWorkspaceNext(continuationToken, options); + result = await this._getDatasetsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -103,7 +106,10 @@ export class Dataset { dataset, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< DatasetCreateOrUpdateDatasetResponse >; @@ -134,9 +140,10 @@ export class Dataset { datasetName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getDatasetOperationSpec) as Promise< - DatasetGetDatasetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getDatasetOperationSpec + ) as Promise; } /** @@ -152,8 +159,13 @@ export class Dataset { datasetName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -184,8 +196,13 @@ export class Dataset { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: 
coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -274,7 +291,11 @@ const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataset, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.datasetName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index 9a8962a761fc..4736ccb3746a 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -2,7 +2,10 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { IntegrationRuntimesListResponse, IntegrationRuntimesGetResponse } from "../models"; +import { + IntegrationRuntimesListResponse, + IntegrationRuntimesGetResponse +} from "../models"; /** * Class representing a IntegrationRuntimes. @@ -22,13 +25,16 @@ export class IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. 
*/ - list(options?: coreHttp.OperationOptions): Promise { + list( + options?: coreHttp.OperationOptions + ): Promise { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< - IntegrationRuntimesListResponse - >; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; } /** @@ -44,9 +50,10 @@ export class IntegrationRuntimes { integrationRuntimeName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< - IntegrationRuntimesGetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index 644f8913310e..35436c92ba04 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -57,7 +57,10 @@ export class LinkedService { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getLinkedServicesByWorkspaceNext(continuationToken, options); + result = await this._getLinkedServicesByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -66,7 +69,9 @@ export class LinkedService { private async *getLinkedServicesByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getLinkedServicesByWorkspacePagingPage(options)) { + for await (const page of this.getLinkedServicesByWorkspacePagingPage( + options + )) { yield* page; } } @@ -103,7 +108,10 @@ export class 
LinkedService { linkedService, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< LinkedServiceCreateOrUpdateLinkedServiceResponse >; @@ -153,8 +161,13 @@ export class LinkedService { linkedServiceName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -185,8 +198,13 @@ export class LinkedService { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -276,7 +294,11 @@ const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.linkedService, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git 
a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index 6bc39f71c116..df003d827a91 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -59,7 +59,10 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebooksByWorkspaceNext(continuationToken, options); + result = await this._getNotebooksByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -101,7 +104,10 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebookSummaryByWorkSpaceNext(continuationToken, options); + result = await this._getNotebookSummaryByWorkSpaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -110,7 +116,9 @@ export class Notebook { private async *getNotebookSummaryByWorkSpacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getNotebookSummaryByWorkSpacePagingPage(options)) { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( + options + )) { yield* page; } } @@ -163,7 +171,10 @@ export class Notebook { notebook, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< NotebookCreateOrUpdateNotebookResponse >; @@ -213,8 +224,13 @@ export class Notebook { notebookName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: 
coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -245,8 +261,13 @@ export class Notebook { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -372,7 +393,11 @@ const createOrUpdateNotebookOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.notebook, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.notebookName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index b481a9d165eb..87c49bd6c9ae 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -59,7 +59,10 @@ export class Pipeline { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getPipelinesByWorkspaceNext(continuationToken, options); + result = await this._getPipelinesByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -105,7 
+108,10 @@ export class Pipeline { pipeline, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< PipelineCreateOrUpdatePipelineResponse >; @@ -155,8 +161,13 @@ export class Pipeline { pipelineName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -187,8 +198,13 @@ export class Pipeline { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -297,7 +313,11 @@ const createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.pipeline, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.pipelineName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts 
b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index 240337eb9067..9c2c3370e5cc 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -157,7 +157,11 @@ const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.filterParameters, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.pipelineName, Parameters.runId], + urlParameters: [ + Parameters.endpoint, + Parameters.pipelineName, + Parameters.runId + ], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index b7510b8e133f..0604bdeeaf1b 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -59,7 +59,10 @@ export class SparkJobDefinition { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options); + result = await this._getSparkJobDefinitionsByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -68,7 +71,9 @@ export class SparkJobDefinition { private async *getSparkJobDefinitionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage(options)) { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( + options + )) { yield* page; } } @@ -162,7 +167,10 @@ export class SparkJobDefinition { sparkJobDefinitionName, options: this.getOperationOptions(options, "location") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: 
coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionExecuteSparkJobDefinitionResponse >; @@ -197,8 +205,13 @@ export class SparkJobDefinition { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -226,7 +239,10 @@ export class SparkJobDefinition { sparkJobDefinitionAzureResource, options: this.getOperationOptions(options, "location") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionDebugSparkJobDefinitionResponse >; @@ -262,7 +278,9 @@ export class SparkJobDefinition { return this.client.sendOperationRequest( operationArguments, getSparkJobDefinitionsByWorkspaceNextOperationSpec - ) as Promise; + ) as Promise< + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse + >; } private getOperationOptions( @@ -311,7 +329,11 @@ const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sparkJobDefinition, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: 
"json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index 375608469931..dc58292e2873 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -26,9 +26,10 @@ export class SqlPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< - SqlPoolsListResponse - >; + return this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ) as Promise; } /** @@ -36,14 +37,18 @@ export class SqlPools { * @param sqlPoolName The Sql Pool name * @param options The options parameters. */ - get(sqlPoolName: string, options?: coreHttp.OperationOptions): Promise { + get( + sqlPoolName: string, + options?: coreHttp.OperationOptions + ): Promise { const operationArguments: coreHttp.OperationArguments = { sqlPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< - SqlPoolsGetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index 8db16a32af20..d3e08c1f3ea1 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -57,7 +57,10 @@ export class SqlScript { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSqlScriptsByWorkspaceNext(continuationToken, options); + result = await this._getSqlScriptsByWorkspaceNext( + continuationToken, + 
options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -163,8 +166,13 @@ export class SqlScript { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -245,7 +253,11 @@ const createOrUpdateSqlScriptOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sqlScript, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index c9ae4ffdeb84..4f333b059259 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -59,7 +59,10 @@ export class Trigger { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getTriggersByWorkspaceNext(continuationToken, options); + result = await this._getTriggersByWorkspaceNext( + continuationToken, + options + ); continuationToken = result.nextLink; yield result.value || []; } @@ -105,7 +108,10 @@ export class Trigger { trigger, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: 
coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerCreateOrUpdateTriggerResponse >; @@ -136,9 +142,10 @@ export class Trigger { triggerName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getTriggerOperationSpec) as Promise< - TriggerGetTriggerResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getTriggerOperationSpec + ) as Promise; } /** @@ -154,8 +161,13 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -183,7 +195,10 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerSubscribeTriggerToEventsResponse >; @@ -233,7 +248,10 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerUnsubscribeTriggerFromEventsResponse >; @@ -264,8 +282,13 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") 
}; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -293,8 +316,13 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { - return this.client.sendOperationRequest(args, spec) as Promise; + const sendOperation = ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + return this.client.sendOperationRequest(args, spec) as Promise< + coreHttp.RestResponse + >; }; const initialOperationResult = await sendOperation( @@ -383,7 +411,11 @@ const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.trigger, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.triggerName], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.ifMatch + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index f64442937aa7..20e37cfac269 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -2,7 +2,10 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from 
"../models"; +import { + RunFilterParameters, + TriggerRunQueryTriggerRunsByWorkspaceResponse +} from "../models"; /** * Class representing a TriggerRun. @@ -95,7 +98,11 @@ const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], + urlParameters: [ + Parameters.endpoint, + Parameters.runId, + Parameters.triggerName + ], headerParameters: [Parameters.accept], serializer }; @@ -109,7 +116,11 @@ const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], + urlParameters: [ + Parameters.endpoint, + Parameters.runId, + Parameters.triggerName + ], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index 9fc379060c1e..4c4453399fb4 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -26,9 +26,10 @@ export class Workspace { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< - WorkspaceGetResponse - >; + return this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ) as Promise; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 550bd2db8014..058888c80532 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -56,7 +56,11 @@ const 
getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.gitHubAccessTokenRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], - headerParameters: [Parameters.accept, Parameters.contentType, Parameters.clientRequestId], + headerParameters: [ + Parameters.accept, + Parameters.contentType, + Parameters.clientRequestId + ], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-spark/review/synapse-spark.api.md b/sdk/synapse/synapse-spark/review/synapse-spark.api.md deleted file mode 100644 index e1689d19b61d..000000000000 --- a/sdk/synapse/synapse-spark/review/synapse-spark.api.md +++ /dev/null @@ -1,582 +0,0 @@ -## API Report File for "@azure/synapse-spark" - -> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). - -```ts - -import * as coreHttp from '@azure/core-http'; - -// @public -export const enum KnownPluginCurrentState { - // (undocumented) - Cleanup = "Cleanup", - // (undocumented) - Ended = "Ended", - // (undocumented) - Monitoring = "Monitoring", - // (undocumented) - Preparation = "Preparation", - // (undocumented) - Queued = "Queued", - // (undocumented) - ResourceAcquisition = "ResourceAcquisition", - // (undocumented) - Submission = "Submission" -} - -// @public -export const enum KnownSchedulerCurrentState { - // (undocumented) - Ended = "Ended", - // (undocumented) - Queued = "Queued", - // (undocumented) - Scheduled = "Scheduled" -} - -// @public -export const enum KnownSparkBatchJobResultType { - // (undocumented) - Cancelled = "Cancelled", - // (undocumented) - Failed = "Failed", - // (undocumented) - Succeeded = "Succeeded", - // (undocumented) - Uncertain = "Uncertain" -} - -// @public -export const enum KnownSparkErrorSource { - // (undocumented) - Dependency = "Dependency", - // (undocumented) - System = "System", - // (undocumented) - Unknown = "Unknown", - // (undocumented) - User = "User" -} - -// @public -export const 
enum KnownSparkJobType { - // (undocumented) - SparkBatch = "SparkBatch", - // (undocumented) - SparkSession = "SparkSession" -} - -// @public -export const enum KnownSparkSessionResultType { - // (undocumented) - Cancelled = "Cancelled", - // (undocumented) - Failed = "Failed", - // (undocumented) - Succeeded = "Succeeded", - // (undocumented) - Uncertain = "Uncertain" -} - -// @public -export const enum KnownSparkStatementLanguageType { - // (undocumented) - Dotnetspark = "dotnetspark", - // (undocumented) - Pyspark = "pyspark", - // (undocumented) - Spark = "spark", - // (undocumented) - Sql = "sql" -} - -// @public -export type PluginCurrentState = string; - -// @public -export type SchedulerCurrentState = string; - -// @public -export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { - detailed?: boolean; -} - -// @public -export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkBatchJob; - }; -}; - -// @public -export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { - detailed?: boolean; -} - -// @public -export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkBatchJob; - }; -}; - -// @public -export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { - detailed?: boolean; - fromParam?: number; - size?: number; -} - -// @public -export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkBatchJobCollection; - }; -}; - -// @public (undocumented) -export interface SparkBatchJob { - appId?: string | null; - appInfo?: { - [propertyName: string]: string; - } | null; - artifactId?: string; - errors?: SparkServiceError[]; - id: number; - jobType?: SparkJobType; - // 
(undocumented) - livyInfo?: SparkBatchJobState; - logLines?: string[] | null; - name?: string; - plugin?: SparkServicePlugin; - result?: SparkBatchJobResultType; - scheduler?: SparkScheduler; - sparkPoolName?: string; - state?: string; - submitterId?: string; - submitterName?: string; - tags?: { - [propertyName: string]: string; - }; - workspaceName?: string; -} - -// @public -export interface SparkBatchJobCollection { - from: number; - sessions?: SparkBatchJob[]; - total: number; -} - -// @public (undocumented) -export interface SparkBatchJobOptions { - // (undocumented) - archives?: string[]; - // (undocumented) - arguments?: string[]; - // (undocumented) - artifactId?: string; - // (undocumented) - className?: string; - configuration?: { - [propertyName: string]: string; - }; - // (undocumented) - driverCores?: number; - // (undocumented) - driverMemory?: string; - // (undocumented) - executorCores?: number; - // (undocumented) - executorCount?: number; - // (undocumented) - executorMemory?: string; - // (undocumented) - file: string; - // (undocumented) - files?: string[]; - // (undocumented) - jars?: string[]; - // (undocumented) - name: string; - // (undocumented) - pythonFiles?: string[]; - tags?: { - [propertyName: string]: string; - }; -} - -// @public -export type SparkBatchJobResultType = string; - -// @public (undocumented) -export interface SparkBatchJobState { - currentState?: string; - deadAt?: Date | null; - // (undocumented) - jobCreationRequest?: SparkRequest; - notStartedAt?: Date | null; - recoveringAt?: Date | null; - runningAt?: Date | null; - startingAt?: Date | null; - successAt?: Date | null; - terminatedAt?: Date | null; -} - -// @public (undocumented) -export class SparkClient extends SparkClientContext { - constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams); - // Warning: (ae-forgotten-export) The symbol "SparkBatch" needs to 
be exported by the entry point index.d.ts - // - // (undocumented) - sparkBatch: SparkBatch; - // Warning: (ae-forgotten-export) The symbol "SparkSession" needs to be exported by the entry point index.d.ts - // - // (undocumented) - sparkSession: SparkSession_2; -} - -// @public (undocumented) -export class SparkClientContext extends coreHttp.ServiceClient { - constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams); - // (undocumented) - endpoint: string; - // (undocumented) - livyApiVersion: string; - // (undocumented) - sparkPoolName: string; -} - -// @public -export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { - endpoint?: string; - livyApiVersion?: string; -} - -// @public -export type SparkErrorSource = string; - -// @public -export type SparkJobType = string; - -// @public (undocumented) -export interface SparkRequest { - // (undocumented) - archives?: string[]; - // (undocumented) - arguments?: string[]; - // (undocumented) - className?: string; - configuration?: { - [propertyName: string]: string; - }; - // (undocumented) - driverCores?: number; - // (undocumented) - driverMemory?: string; - // (undocumented) - executorCores?: number; - // (undocumented) - executorCount?: number; - // (undocumented) - executorMemory?: string; - // (undocumented) - file?: string; - // (undocumented) - files?: string[]; - // (undocumented) - jars?: string[]; - // (undocumented) - name?: string; - // (undocumented) - pythonFiles?: string[]; -} - -// @public (undocumented) -export interface SparkScheduler { - // (undocumented) - cancellationRequestedAt?: Date; - // (undocumented) - currentState?: SchedulerCurrentState; - // (undocumented) - endedAt?: Date | null; - // (undocumented) - scheduledAt?: Date | null; - // (undocumented) - submittedAt?: Date | null; -} - -// @public (undocumented) -export interface SparkServiceError { - // 
(undocumented) - errorCode?: string; - // (undocumented) - message?: string; - // (undocumented) - source?: SparkErrorSource; -} - -// @public (undocumented) -export interface SparkServicePlugin { - // (undocumented) - cleanupStartedAt?: Date | null; - // (undocumented) - currentState?: PluginCurrentState; - // (undocumented) - monitoringStartedAt?: Date | null; - // (undocumented) - preparationStartedAt?: Date | null; - // (undocumented) - resourceAcquisitionStartedAt?: Date | null; - // (undocumented) - submissionStartedAt?: Date | null; -} - -// @public (undocumented) -export interface SparkSession { - // (undocumented) - appId?: string | null; - appInfo?: { - [propertyName: string]: string; - } | null; - // (undocumented) - artifactId?: string; - errors?: SparkServiceError[]; - // (undocumented) - id: number; - jobType?: SparkJobType; - // (undocumented) - livyInfo?: SparkSessionState; - // (undocumented) - logLines?: string[] | null; - // (undocumented) - name?: string; - // (undocumented) - plugin?: SparkServicePlugin; - // (undocumented) - result?: SparkSessionResultType; - // (undocumented) - scheduler?: SparkScheduler; - // (undocumented) - sparkPoolName?: string; - // (undocumented) - state?: string; - // (undocumented) - submitterId?: string; - // (undocumented) - submitterName?: string; - tags?: { - [propertyName: string]: string; - }; - // (undocumented) - workspaceName?: string; -} - -// @public -export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellationResult & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkStatementCancellationResult; - }; -}; - -// @public (undocumented) -export interface SparkSessionCollection { - // (undocumented) - from: number; - // (undocumented) - sessions?: SparkSession[]; - // (undocumented) - total: number; -} - -// @public -export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { - detailed?: boolean; -} - -// @public 
-export type SparkSessionCreateSparkSessionResponse = SparkSession & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkSession; - }; -}; - -// @public -export type SparkSessionCreateSparkStatementResponse = SparkStatement & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkStatement; - }; -}; - -// @public -export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { - detailed?: boolean; -} - -// @public -export type SparkSessionGetSparkSessionResponse = SparkSession & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkSession; - }; -}; - -// @public -export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { - detailed?: boolean; - fromParam?: number; - size?: number; -} - -// @public -export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkSessionCollection; - }; -}; - -// @public -export type SparkSessionGetSparkStatementResponse = SparkStatement & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkStatement; - }; -}; - -// @public -export type SparkSessionGetSparkStatementsResponse = SparkStatementCollection & { - _response: coreHttp.HttpResponse & { - bodyAsText: string; - parsedBody: SparkStatementCollection; - }; -}; - -// @public (undocumented) -export interface SparkSessionOptions { - // (undocumented) - archives?: string[]; - // (undocumented) - arguments?: string[]; - // (undocumented) - artifactId?: string; - // (undocumented) - className?: string; - configuration?: { - [propertyName: string]: string; - }; - // (undocumented) - driverCores?: number; - // (undocumented) - driverMemory?: string; - // (undocumented) - executorCores?: number; - // (undocumented) - executorCount?: number; - // (undocumented) - executorMemory?: string; - // (undocumented) - file?: 
string; - // (undocumented) - files?: string[]; - // (undocumented) - jars?: string[]; - // (undocumented) - name: string; - // (undocumented) - pythonFiles?: string[]; - tags?: { - [propertyName: string]: string; - }; -} - -// @public -export type SparkSessionResultType = string; - -// @public (undocumented) -export interface SparkSessionState { - // (undocumented) - busyAt?: Date | null; - // (undocumented) - currentState?: string; - // (undocumented) - deadAt?: Date | null; - // (undocumented) - errorAt?: Date | null; - // (undocumented) - idleAt?: Date | null; - // (undocumented) - jobCreationRequest?: SparkRequest; - // (undocumented) - notStartedAt?: Date | null; - // (undocumented) - recoveringAt?: Date | null; - // (undocumented) - shuttingDownAt?: Date | null; - // (undocumented) - startingAt?: Date | null; - terminatedAt?: Date | null; -} - -// @public (undocumented) -export interface SparkStatement { - // (undocumented) - code?: string; - // (undocumented) - id: number; - // (undocumented) - output?: SparkStatementOutput; - // (undocumented) - state?: string; -} - -// @public (undocumented) -export interface SparkStatementCancellationResult { - // (undocumented) - msg?: string; -} - -// @public (undocumented) -export interface SparkStatementCollection { - // (undocumented) - statements?: SparkStatement[]; - // (undocumented) - total: number; -} - -// @public -export type SparkStatementLanguageType = string; - -// @public (undocumented) -export interface SparkStatementOptions { - // (undocumented) - code?: string; - // (undocumented) - kind?: SparkStatementLanguageType; -} - -// @public (undocumented) -export interface SparkStatementOutput { - data?: any; - // (undocumented) - errorName?: string; - // (undocumented) - errorValue?: string; - // (undocumented) - executionCount: number; - // (undocumented) - status?: string; - // (undocumented) - traceback?: string[]; -} - - -// (No @packageDocumentation comment for this package) - -``` diff --git 
a/sdk/synapse/synapse-spark/src/models/index.ts b/sdk/synapse/synapse-spark/src/models/index.ts index 8f7ea475c8cf..c388e24dc015 100644 --- a/sdk/synapse/synapse-spark/src/models/index.ts +++ b/sdk/synapse/synapse-spark/src/models/index.ts @@ -465,7 +465,8 @@ export type SparkStatementLanguageType = string; /** * Optional parameters. */ -export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobsOptionalParams + extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -504,7 +505,8 @@ export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { /** * Optional parameters. */ -export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { +export interface SparkBatchCreateSparkBatchJobOptionalParams + extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -534,7 +536,8 @@ export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. */ -export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobOptionalParams + extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -564,7 +567,8 @@ export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionsOptionalParams + extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -603,7 +607,8 @@ export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { /** * Optional parameters. 
*/ -export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { +export interface SparkSessionCreateSparkSessionOptionalParams + extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -633,7 +638,8 @@ export type SparkSessionCreateSparkSessionResponse = SparkSession & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionOptionalParams + extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -743,7 +749,8 @@ export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellatio /** * Optional parameters. */ -export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface SparkClientOptionalParams + extends coreHttp.ServiceClientOptions { /** * Valid api-version for the request. 
*/ diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts index 0a8b12b48eae..d9d36b0a8b52 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts @@ -112,7 +112,11 @@ const getSparkBatchJobsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], headerParameters: [Parameters.accept], serializer }; @@ -126,7 +130,11 @@ const createSparkBatchJobOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.sparkBatchJobOptions, queryParameters: [Parameters.detailed], - urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts index 60decb5b9384..844d4d4331ac 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts @@ -221,7 +221,11 @@ const getSparkSessionsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], headerParameters: [Parameters.accept], serializer }; @@ -235,7 +239,11 @@ const createSparkSessionOperationSpec: coreHttp.OperationSpec = { }, requestBody: 
Parameters.sparkSessionOptions, queryParameters: [Parameters.detailed], - urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName + ], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/sparkClientContext.ts b/sdk/synapse/synapse-spark/src/sparkClientContext.ts index 0c01469a88a2..1cdf3c8419be 100644 --- a/sdk/synapse/synapse-spark/src/sparkClientContext.ts +++ b/sdk/synapse/synapse-spark/src/sparkClientContext.ts @@ -43,12 +43,17 @@ export class SparkClientContext extends coreHttp.ServiceClient { options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; } + if (!options.credentialScopes) { + options.credentialScopes = ["https://microsoft.com"]; + } + super(credentials, options); this.requestContentType = "application/json; charset=utf-8"; this.baseUri = - options.endpoint || "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; + options.endpoint || + "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; // Parameter assignments this.endpoint = endpoint; From 4287d43eab5c45f392ec9e6bd35a71111cbd7e5a Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Tue, 8 Dec 2020 06:30:55 +1300 Subject: [PATCH 16/28] WIP --- .../src/accessControlClient.ts | 6 +- .../synapse-accesscontrol/src/models/index.ts | 3 +- .../src/artifactsClientContext.ts | 14 +--- .../src/lro/azureAsyncOperationStrategy.ts | 42 +++-------- .../src/lro/bodyPollingStrategy.ts | 5 +- .../src/lro/locationStrategy.ts | 7 +- .../synapse-artifacts/src/lro/lroPolicy.ts | 8 +-- .../synapse-artifacts/src/lro/lroPoller.ts | 27 ++------ .../synapse-artifacts/src/lro/models.ts | 8 +-- .../synapse-artifacts/src/lro/operation.ts | 4 +- .../synapse-artifacts/src/lro/requestUtils.ts | 16 +---- .../synapse-artifacts/src/models/index.ts | 69 
++++++------------- .../synapse-artifacts/src/models/mappers.ts | 46 ++++--------- .../src/operations/bigDataPools.ts | 14 ++-- .../src/operations/dataFlow.ts | 34 ++------- .../src/operations/dataFlowDebugSession.ts | 39 +++-------- .../src/operations/dataset.ts | 41 +++-------- .../src/operations/integrationRuntimes.ts | 23 +++---- .../src/operations/linkedService.ts | 38 +++------- .../src/operations/notebook.ts | 43 +++--------- .../src/operations/pipeline.ts | 34 ++------- .../src/operations/pipelineRun.ts | 6 +- .../src/operations/sparkJobDefinition.ts | 38 +++------- .../src/operations/sqlPools.ts | 19 ++--- .../src/operations/sqlScript.ts | 20 ++---- .../src/operations/trigger.ts | 60 ++++------------ .../src/operations/triggerRun.ts | 17 +---- .../src/operations/workspace.ts | 7 +- .../operations/workspaceGitRepoManagement.ts | 6 +- sdk/synapse/synapse-spark/src/models/index.ts | 21 ++---- .../src/operations/sparkBatch.ts | 12 +--- .../src/operations/sparkSession.ts | 12 +--- .../synapse-spark/src/sparkClientContext.ts | 3 +- 33 files changed, 180 insertions(+), 562 deletions(-) diff --git a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts index 0a7ff0c8a2e3..61aeccd43e68 100644 --- a/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts +++ b/sdk/synapse/synapse-accesscontrol/src/accessControlClient.ts @@ -291,11 +291,7 @@ const getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [ - Parameters.apiVersion, - Parameters.roleId1, - Parameters.principalId - ], + queryParameters: [Parameters.apiVersion, Parameters.roleId1, Parameters.principalId], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.continuationToken], serializer diff --git a/sdk/synapse/synapse-accesscontrol/src/models/index.ts b/sdk/synapse/synapse-accesscontrol/src/models/index.ts index 
e651fb487f46..8dc93686ed46 100644 --- a/sdk/synapse/synapse-accesscontrol/src/models/index.ts +++ b/sdk/synapse/synapse-accesscontrol/src/models/index.ts @@ -269,8 +269,7 @@ export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListRespons /** * Optional parameters. */ -export interface AccessControlClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts index 8dfd8150282a..54dabfa971ea 100644 --- a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts @@ -48,17 +48,12 @@ export class ArtifactsClientContext extends coreHttp.ServiceClient { // When an array of factories is passed in, we'll just add the required factories, // in this case lroPolicy(). It is important to note that passing an array of factories // to a new client, bypasses core-http default factories. Just the pipelines provided will be run. - options.requestPolicyFactories = [ - lroPolicy(), - ...options.requestPolicyFactories - ]; + options.requestPolicyFactories = [lroPolicy(), ...options.requestPolicyFactories]; } else if (options.requestPolicyFactories) { // When we were passed a requestPolicyFactories as a function, we'll create a new one that adds the factories provided // in the options plus the required policies. 
When using this path, the pipelines passed to the client will be added to the // default policies added by core-http - const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [ - lroPolicy() - ]; + const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [lroPolicy()]; options.requestPolicyFactories = (defaultFactories) => [ ...optionsPolicies, ...defaultFactories @@ -66,10 +61,7 @@ export class ArtifactsClientContext extends coreHttp.ServiceClient { } else { // In case no request policy factories were provided, we'll just need to create a function that will add // the lroPolicy to the default pipelines added by core-http - options.requestPolicyFactories = (defaultFactories) => [ - lroPolicy(), - ...defaultFactories - ]; + options.requestPolicyFactories = (defaultFactories) => [lroPolicy(), ...defaultFactories]; } super(credentials, options); diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts index 0b5d4232590f..3c2fd2e5d6c9 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -6,11 +6,7 @@ import { FinalStateVia, LROSYM } from "./models"; -import { - OperationSpec, - OperationArguments, - OperationResponse -} from "@azure/core-http"; +import { OperationSpec, OperationArguments, OperationResponse } from "@azure/core-http"; import { terminalStates } from "./constants"; import { SendOperationFn } from "."; @@ -21,14 +17,11 @@ export function createAzureAsyncOperationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; - let lastKnownPollingUrl = - 
lroData.azureAsyncOperation || lroData.operationLocation; + let lastKnownPollingUrl = lroData.azureAsyncOperation || lroData.operationLocation; return { isTerminal: () => { @@ -59,17 +52,12 @@ export function createAzureAsyncOperationStrategy( const initialOperationResult = initialOperation.result._response[LROSYM]; const currentOperationResult = currentOperation.result._response[LROSYM]; - if ( - !shouldPerformFinalGet(initialOperationResult, currentOperationResult) - ) { + if (!shouldPerformFinalGet(initialOperationResult, currentOperationResult)) { return currentOperation; } if (initialOperationResult?.requestMethod === "PUT") { - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); return currentOperation; } @@ -77,29 +65,20 @@ export function createAzureAsyncOperationStrategy( if (initialOperationResult?.location) { switch (finalStateVia) { case "original-uri": - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); return currentOperation; case "azure-async-operation": return currentOperation; case "location": default: - const location = - initialOperationResult.location || - currentOperationResult?.location; + const location = initialOperationResult.location || currentOperationResult?.location; if (!location) { throw new Error("Couldn't determine final GET URL from location"); } - return await sendFinalGet( - initialOperation, - sendOperationFn, - location - ); + return await sendFinalGet(initialOperation, sendOperationFn, location); } } @@ -177,10 +156,7 @@ function getCompositeMappers(responses: { }, {} as { [responseCode: string]: OperationResponse }); } -function shouldPerformFinalGet( - initialResult?: LROResponseInfo, - currentResult?: LROResponseInfo -) { +function shouldPerformFinalGet(initialResult?: LROResponseInfo, currentResult?: 
LROResponseInfo) { const { status } = currentResult || {}; const { requestMethod: initialRequestMethod, location } = initialResult || {}; if (status && status.toLowerCase() !== "succeeded") { diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts index 62ed188e691e..49333c25b430 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -44,10 +44,7 @@ export function createBodyPollingStrategy( }; // Execute the polling operation - initialOperation.result = await sendOperation( - initialOperation.args, - pollingSpec - ); + initialOperation.result = await sendOperation(initialOperation.args, pollingSpec); return initialOperation; } }; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts index cfcfa8efd0a7..008f60177503 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -8,9 +8,7 @@ export function createLocationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; @@ -51,8 +49,7 @@ export function createLocationStrategy( const result = await sendOperationFn(pollingArgs, pollingSpec); // Update latest polling url - lastKnownPollingUrl = - result._response[LROSYM]?.location || lastKnownPollingUrl; + lastKnownPollingUrl = result._response[LROSYM]?.location || lastKnownPollingUrl; // Update lastOperation result currentOperation = { diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index e686401a5cf9..1e1ec61db3b3 
100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -21,12 +21,8 @@ class LROPolicy extends BaseRequestPolicy { super(nextPolicy, options); } - public async sendRequest( - webResource: WebResource - ): Promise { - let result: LROOperationResponse = await this._nextPolicy.sendRequest( - webResource - ); + public async sendRequest(webResource: WebResource): Promise { + let result: LROOperationResponse = await this._nextPolicy.sendRequest(webResource); const _lroData = getLROData(result); result[LROSYM] = _lroData; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts index 72d979bd46a9..9ab5f25fb49a 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -1,17 +1,6 @@ import { Poller } from "@azure/core-lro"; -import { - OperationSpec, - OperationArguments, - delay, - RestError -} from "@azure/core-http"; -import { - BaseResult, - LROOperationState, - LROOperationStep, - FinalStateVia, - LROSYM -} from "./models"; +import { OperationSpec, OperationArguments, delay, RestError } from "@azure/core-http"; +import { BaseResult, LROOperationState, LROOperationStep, FinalStateVia, LROSYM } from "./models"; import { makeOperation } from "./operation"; import { createBodyPollingStrategy } from "./bodyPollingStrategy"; import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; @@ -70,11 +59,7 @@ export class LROPoller extends Poller< result: initialOperationResult }; - const pollingStrategy = getPollingStrategy( - initialOperation, - sendOperation, - finalStateVia - ); + const pollingStrategy = getPollingStrategy(initialOperation, sendOperation, finalStateVia); const state: LROOperationState = { // Initial operation will become the last operation @@ -127,11 +112,7 @@ function getPollingStrategy( } if (lroData.azureAsyncOperation || 
lroData.operationLocation) { - return createAzureAsyncOperationStrategy( - initialOperation, - sendOperationFn, - finalStateVia - ); + return createAzureAsyncOperationStrategy(initialOperation, sendOperationFn, finalStateVia); } if (lroData.location) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts index ed0dd9132876..704c731cf370 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/models.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -8,10 +8,7 @@ import { import { PollOperationState, PollOperation } from "@azure/core-lro"; export const LROSYM = Symbol("LROData"); -export type FinalStateVia = - | "azure-async-operation" - | "location" - | "original-uri"; +export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; export interface LROResponseInfo { requestMethod: HttpMethods; @@ -47,8 +44,7 @@ export interface LROOperationStep { result: TResult; } -export interface LROOperationState - extends PollOperationState { +export interface LROOperationState extends PollOperationState { lastOperation: LROOperationStep; initialOperation: LROOperationStep; pollingStrategy: LROStrategy; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts index 9b37277e7b2a..e1e3fa2e1126 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/operation.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -41,9 +41,7 @@ async function update( const currentLroData = currentResponse.result._response[LROSYM]; if (!currentLroData) { - throw new Error( - "Expected lroData to be defined for updating LRO operation" - ); + throw new Error("Expected lroData to be defined for updating LRO operation"); } if (state.result) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts index e3289b95905b..3c518804edec 100644 --- 
a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -21,17 +21,10 @@ export function shouldDeserializeLRO(finalStateVia?: string) { isInitialRequest = false; } - if ( - initialOperationInfo.azureAsyncOperation || - initialOperationInfo.operationLocation - ) { + if (initialOperationInfo.azureAsyncOperation || initialOperationInfo.operationLocation) { return ( !isInitialRequest && - isAsyncOperationFinalResponse( - response, - initialOperationInfo, - finalStateVia - ) + isAsyncOperationFinalResponse(response, initialOperationInfo, finalStateVia) ); } @@ -69,10 +62,7 @@ function isAsyncOperationFinalResponse( return true; } - if ( - initialOperationInfo.requestMethod !== "PUT" && - !initialOperationInfo.location - ) { + if (initialOperationInfo.requestMethod !== "PUT" && !initialOperationInfo.location) { return true; } diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index 4bcfe16ee83c..274f837efafd 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -199,9 +199,7 @@ export type TriggerUnion = | TumblingWindowTrigger | ChainingTrigger; export type DataFlowUnion = MappingDataFlow; -export type IntegrationRuntimeUnion = - | ManagedIntegrationRuntime - | SelfHostedIntegrationRuntime; +export type IntegrationRuntimeUnion = ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; export type SecretBaseUnion = SecureString | AzureKeyVaultSecretReference; export type DatasetLocationUnion = | AzureBlobStorageLocation @@ -352,10 +350,7 @@ export type ExecutionActivityUnion = | ExecuteDataFlowActivity | SynapseNotebookActivity | SynapseSparkJobDefinitionActivity; -export type MultiplePipelineTriggerUnion = - | ScheduleTrigger - | BlobTrigger - | BlobEventsTrigger; +export type MultiplePipelineTriggerUnion = ScheduleTrigger | BlobTrigger | BlobEventsTrigger; export type 
TabularSourceUnion = | AzureTableSource | InformixSource @@ -3134,12 +3129,7 @@ export interface DatasetStorageFormat { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "TextFormat" - | "JsonFormat" - | "AvroFormat" - | "OrcFormat" - | "ParquetFormat"; + type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -3329,10 +3319,7 @@ export interface FormatWriteSettings { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "AvroWriteSettings" - | "DelimitedTextWriteSettings" - | "JsonWriteSettings"; + type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -14607,8 +14594,7 @@ export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceReso /** * Optional parameters. */ -export interface LinkedServiceGetLinkedServiceOptionalParams - extends coreHttp.OperationOptions { +export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { /** * ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14678,8 +14664,7 @@ export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { /** * Optional parameters. */ -export interface DatasetCreateOrUpdateDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
*/ @@ -14713,8 +14698,7 @@ export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { /** * Optional parameters. */ -export interface DatasetGetDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14784,8 +14768,7 @@ export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { /** * Optional parameters. */ -export interface PipelineCreateOrUpdatePipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -14819,8 +14802,7 @@ export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineGetPipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -14850,8 +14832,7 @@ export type PipelineGetPipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineCreatePipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { /** * Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */ @@ -14973,8 +14954,7 @@ export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { /** * Optional parameters. 
*/ -export interface PipelineRunCancelPipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { /** * If true, cancel all the Child pipelines that are triggered by the current pipeline. */ @@ -15004,8 +14984,7 @@ export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { /** * Optional parameters. */ -export interface TriggerCreateOrUpdateTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15039,8 +15018,7 @@ export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { /** * Optional parameters. */ -export interface TriggerGetTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15178,8 +15156,7 @@ export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResp /** * Optional parameters. */ -export interface DataFlowCreateOrUpdateDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15213,8 +15190,7 @@ export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { /** * Optional parameters. 
*/ -export interface DataFlowGetDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15412,8 +15388,7 @@ export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & /** * Optional parameters. */ -export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { /** * ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15443,8 +15418,7 @@ export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { /** * Optional parameters. */ -export interface SqlScriptGetSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { /** * ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15684,8 +15658,7 @@ export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse /** * Optional parameters. */ -export interface NotebookCreateOrUpdateNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { /** * ETag of the Note book entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -15719,8 +15692,7 @@ export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { /** * Optional parameters. 
*/ -export interface NotebookGetNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { /** * ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -15961,8 +15933,7 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces /** * Optional parameters. */ -export interface ArtifactsClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index 424331700c33..0b924baee621 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -14753,8 +14753,7 @@ export const TextFormat: coreHttp.CompositeMapper = { className: "TextFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, columnDelimiter: { @@ -14822,8 +14821,7 @@ export const JsonFormat: coreHttp.CompositeMapper = { className: "JsonFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, filePattern: { @@ -14867,8 +14865,7 @@ export const AvroFormat: coreHttp.CompositeMapper = { className: "AvroFormat", uberParent: "DatasetStorageFormat", additionalProperties: { 
type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14882,8 +14879,7 @@ export const OrcFormat: coreHttp.CompositeMapper = { className: "OrcFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14897,8 +14893,7 @@ export const ParquetFormat: coreHttp.CompositeMapper = { className: "ParquetFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14985,8 +14980,7 @@ export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebAnonymousAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties } @@ -14999,8 +14993,7 @@ export const WebBasicAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebBasicAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { 
...WebLinkedServiceTypeProperties.type.modelProperties, username: { @@ -15027,8 +15020,7 @@ export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebClientCertificateAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, pfx: { @@ -17642,9 +17634,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap ...DependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -17656,9 +17646,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap }, size: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -17677,8 +17665,7 @@ export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeKeyAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, key: { @@ -17698,8 +17685,7 @@ export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeRbacAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - 
LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, resourceId: { @@ -21054,9 +21040,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper ...TriggerDependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -21067,9 +21051,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper }, size: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index 977767e2578f..3eab86fcd719 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -26,10 +26,9 @@ export class BigDataPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + BigDataPoolsListResponse + >; } /** @@ -45,10 +44,9 @@ export class BigDataPools { bigDataPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + BigDataPoolsGetResponse + 
>; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index 3dffca96470b..010e6f89c007 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -57,10 +57,7 @@ export class DataFlow { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDataFlowsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDataFlowsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -90,10 +87,7 @@ export class DataFlow { dataFlow, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowCreateOrUpdateDataFlowResponse >; @@ -143,13 +137,8 @@ export class DataFlow { dataFlowName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -180,13 +169,8 @@ export class DataFlow { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: 
coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -276,11 +260,7 @@ const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataFlow, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index f0e17e80dafd..f84fba997bfa 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -59,10 +59,7 @@ export class DataFlowDebugSession { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._queryDataFlowDebugSessionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._queryDataFlowDebugSessionsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -71,9 +68,7 @@ export class DataFlowDebugSession { private async *queryDataFlowDebugSessionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage( - options - )) { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage(options)) { yield* page; } } @@ -86,17 +81,12 @@ export class DataFlowDebugSession { async createDataFlowDebugSession( request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions - ): Promise< - LROPoller - > { + ): Promise> { const 
operationArguments: coreHttp.OperationArguments = { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowDebugSessionCreateDataFlowDebugSessionResponse >; @@ -120,18 +110,14 @@ export class DataFlowDebugSession { */ private _queryDataFlowDebugSessionsByWorkspace( options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse - > { + ): Promise { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest( operationArguments, queryDataFlowDebugSessionsByWorkspaceOperationSpec - ) as Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse - >; + ) as Promise; } /** @@ -185,10 +171,7 @@ export class DataFlowDebugSession { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DataFlowDebugSessionExecuteCommandResponse >; @@ -215,9 +198,7 @@ export class DataFlowDebugSession { private _queryDataFlowDebugSessionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse - > { + ): Promise { const operationArguments: coreHttp.OperationArguments = { nextLink, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -225,9 +206,7 @@ export class DataFlowDebugSession { return this.client.sendOperationRequest( operationArguments, 
queryDataFlowDebugSessionsByWorkspaceNextOperationSpec - ) as Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse - >; + ) as Promise; } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 0cef4925e7c0..294e0e457bd6 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -57,10 +57,7 @@ export class Dataset { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDatasetsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDatasetsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -106,10 +103,7 @@ export class Dataset { dataset, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< DatasetCreateOrUpdateDatasetResponse >; @@ -140,10 +134,9 @@ export class Dataset { datasetName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getDatasetOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getDatasetOperationSpec) as Promise< + DatasetGetDatasetResponse + >; } /** @@ -159,13 +152,8 @@ export class Dataset { datasetName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: 
coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -196,13 +184,8 @@ export class Dataset { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -291,11 +274,7 @@ const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataset, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.datasetName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index 4736ccb3746a..9a8962a761fc 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -2,10 +2,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { - IntegrationRuntimesListResponse, - IntegrationRuntimesGetResponse -} from "../models"; +import { IntegrationRuntimesListResponse, IntegrationRuntimesGetResponse } from "../models"; /** * Class representing a IntegrationRuntimes. 
@@ -25,16 +22,13 @@ export class IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. */ - list( - options?: coreHttp.OperationOptions - ): Promise { + list(options?: coreHttp.OperationOptions): Promise { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + IntegrationRuntimesListResponse + >; } /** @@ -50,10 +44,9 @@ export class IntegrationRuntimes { integrationRuntimeName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + IntegrationRuntimesGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index 35436c92ba04..644f8913310e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -57,10 +57,7 @@ export class LinkedService { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getLinkedServicesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getLinkedServicesByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -69,9 +66,7 @@ export class LinkedService { private async *getLinkedServicesByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getLinkedServicesByWorkspacePagingPage( - options - )) { + for await (const 
page of this.getLinkedServicesByWorkspacePagingPage(options)) { yield* page; } } @@ -108,10 +103,7 @@ export class LinkedService { linkedService, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< LinkedServiceCreateOrUpdateLinkedServiceResponse >; @@ -161,13 +153,8 @@ export class LinkedService { linkedServiceName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -198,13 +185,8 @@ export class LinkedService { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -294,11 +276,7 @@ const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.linkedService, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, 
Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index df003d827a91..6bc39f71c116 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -59,10 +59,7 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebooksByWorkspaceNext( - continuationToken, - options - ); + result = await this._getNotebooksByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -104,10 +101,7 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebookSummaryByWorkSpaceNext( - continuationToken, - options - ); + result = await this._getNotebookSummaryByWorkSpaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -116,9 +110,7 @@ export class Notebook { private async *getNotebookSummaryByWorkSpacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( - options - )) { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage(options)) { yield* page; } } @@ -171,10 +163,7 @@ export class Notebook { notebook, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< NotebookCreateOrUpdateNotebookResponse >; @@ -224,13 +213,8 @@ export class Notebook { notebookName, options: this.getOperationOptions(options, "undefined") }; - 
const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -261,13 +245,8 @@ export class Notebook { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -393,11 +372,7 @@ const createOrUpdateNotebookOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.notebook, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.notebookName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index 87c49bd6c9ae..b481a9d165eb 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -59,10 +59,7 @@ export class Pipeline { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getPipelinesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getPipelinesByWorkspaceNext(continuationToken, options); 
continuationToken = result.nextLink; yield result.value || []; } @@ -108,10 +105,7 @@ export class Pipeline { pipeline, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< PipelineCreateOrUpdatePipelineResponse >; @@ -161,13 +155,8 @@ export class Pipeline { pipelineName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -198,13 +187,8 @@ export class Pipeline { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -313,11 +297,7 @@ const createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.pipeline, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.pipelineName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git 
a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index 9c2c3370e5cc..240337eb9067 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -157,11 +157,7 @@ const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.filterParameters, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.pipelineName, - Parameters.runId - ], + urlParameters: [Parameters.endpoint, Parameters.pipelineName, Parameters.runId], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index 0604bdeeaf1b..b7510b8e133f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -59,10 +59,7 @@ export class SparkJobDefinition { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSparkJobDefinitionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -71,9 +68,7 @@ export class SparkJobDefinition { private async *getSparkJobDefinitionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( - options - )) { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage(options)) { yield* page; } } @@ -167,10 +162,7 @@ export class SparkJobDefinition { sparkJobDefinitionName, options: this.getOperationOptions(options, "location") }; - const 
sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionExecuteSparkJobDefinitionResponse >; @@ -205,13 +197,8 @@ export class SparkJobDefinition { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -239,10 +226,7 @@ export class SparkJobDefinition { sparkJobDefinitionAzureResource, options: this.getOperationOptions(options, "location") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< SparkJobDefinitionDebugSparkJobDefinitionResponse >; @@ -278,9 +262,7 @@ export class SparkJobDefinition { return this.client.sendOperationRequest( operationArguments, getSparkJobDefinitionsByWorkspaceNextOperationSpec - ) as Promise< - SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse - >; + ) as Promise; } private getOperationOptions( @@ -329,11 +311,7 @@ const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sparkJobDefinition, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: 
[Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index dc58292e2873..375608469931 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -26,10 +26,9 @@ export class SqlPools { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, listOperationSpec) as Promise< + SqlPoolsListResponse + >; } /** @@ -37,18 +36,14 @@ export class SqlPools { * @param sqlPoolName The Sql Pool name * @param options The options parameters. */ - get( - sqlPoolName: string, - options?: coreHttp.OperationOptions - ): Promise { + get(sqlPoolName: string, options?: coreHttp.OperationOptions): Promise { const operationArguments: coreHttp.OperationArguments = { sqlPoolName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + SqlPoolsGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index d3e08c1f3ea1..8db16a32af20 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -57,10 +57,7 @@ export class SqlScript { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSqlScriptsByWorkspaceNext( - continuationToken, - options - 
); + result = await this._getSqlScriptsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -166,13 +163,8 @@ export class SqlScript { request, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -253,11 +245,7 @@ const createOrUpdateSqlScriptOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sqlScript, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index 4f333b059259..c9ae4ffdeb84 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -59,10 +59,7 @@ export class Trigger { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getTriggersByWorkspaceNext( - continuationToken, - options - ); + result = await this._getTriggersByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -108,10 +105,7 @@ export class Trigger { trigger, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + 
const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerCreateOrUpdateTriggerResponse >; @@ -142,10 +136,9 @@ export class Trigger { triggerName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getTriggerOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getTriggerOperationSpec) as Promise< + TriggerGetTriggerResponse + >; } /** @@ -161,13 +154,8 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -195,10 +183,7 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerSubscribeTriggerToEventsResponse >; @@ -248,10 +233,7 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { return this.client.sendOperationRequest(args, spec) as Promise< TriggerUnsubscribeTriggerFromEventsResponse >; @@ -282,13 +264,8 @@ export class Trigger { 
triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -316,13 +293,8 @@ export class Trigger { triggerName, options: this.getOperationOptions(options, "undefined") }; - const sendOperation = ( - args: coreHttp.OperationArguments, - spec: coreHttp.OperationSpec - ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + const sendOperation = (args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec) => { + return this.client.sendOperationRequest(args, spec) as Promise; }; const initialOperationResult = await sendOperation( @@ -411,11 +383,7 @@ const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.trigger, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.triggerName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index 20e37cfac269..f64442937aa7 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -2,10 +2,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { - 
RunFilterParameters, - TriggerRunQueryTriggerRunsByWorkspaceResponse -} from "../models"; +import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from "../models"; /** * Class representing a TriggerRun. @@ -98,11 +95,7 @@ const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; @@ -116,11 +109,7 @@ const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index 4c4453399fb4..9fc379060c1e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -26,10 +26,9 @@ export class Workspace { const operationArguments: coreHttp.OperationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + return this.client.sendOperationRequest(operationArguments, getOperationSpec) as Promise< + WorkspaceGetResponse + >; } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 058888c80532..550bd2db8014 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ 
b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -56,11 +56,7 @@ const getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.gitHubAccessTokenRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.clientRequestId - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.clientRequestId], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-spark/src/models/index.ts b/sdk/synapse/synapse-spark/src/models/index.ts index c388e24dc015..8f7ea475c8cf 100644 --- a/sdk/synapse/synapse-spark/src/models/index.ts +++ b/sdk/synapse/synapse-spark/src/models/index.ts @@ -465,8 +465,7 @@ export type SparkStatementLanguageType = string; /** * Optional parameters. */ -export interface SparkBatchGetSparkBatchJobsOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -505,8 +504,7 @@ export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { /** * Optional parameters. */ -export interface SparkBatchCreateSparkBatchJobOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -536,8 +534,7 @@ export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. */ -export interface SparkBatchGetSparkBatchJobOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. 
*/ @@ -567,8 +564,7 @@ export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionsOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -607,8 +603,7 @@ export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { /** * Optional parameters. */ -export interface SparkSessionCreateSparkSessionOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -638,8 +633,7 @@ export type SparkSessionCreateSparkSessionResponse = SparkSession & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -749,8 +743,7 @@ export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellatio /** * Optional parameters. */ -export interface SparkClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Valid api-version for the request. 
*/ diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts index d9d36b0a8b52..0a8b12b48eae 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts @@ -112,11 +112,7 @@ const getSparkBatchJobsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept], serializer }; @@ -130,11 +126,7 @@ const createSparkBatchJobOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.sparkBatchJobOptions, queryParameters: [Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts index 844d4d4331ac..60decb5b9384 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts @@ -221,11 +221,7 @@ const getSparkSessionsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept], serializer }; @@ -239,11 +235,7 @@ const createSparkSessionOperationSpec: coreHttp.OperationSpec = { }, requestBody: 
Parameters.sparkSessionOptions, queryParameters: [Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/sparkClientContext.ts b/sdk/synapse/synapse-spark/src/sparkClientContext.ts index 1cdf3c8419be..7b317d1d8d50 100644 --- a/sdk/synapse/synapse-spark/src/sparkClientContext.ts +++ b/sdk/synapse/synapse-spark/src/sparkClientContext.ts @@ -52,8 +52,7 @@ export class SparkClientContext extends coreHttp.ServiceClient { this.requestContentType = "application/json; charset=utf-8"; this.baseUri = - options.endpoint || - "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; + options.endpoint || "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; // Parameter assignments this.endpoint = endpoint; From 5b74ab3c8ebb5f2a0d07b6fd6135d3239ca563c5 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 9 Dec 2020 10:23:20 +1300 Subject: [PATCH 17/28] New regeneration with tracing --- sdk/synapse/synapse-access-control/README.md | 2 +- .../synapse-access-control/package.json | 2 + .../synapse-access-control/rollup.config.js | 10 +- .../src/accessControlClient.ts | 218 +++++++++++---- .../synapse-access-control/src/tracing.ts | 6 + sdk/synapse/synapse-artifacts/README.md | 2 +- sdk/synapse/synapse-artifacts/package.json | 2 + .../synapse-artifacts/rollup.config.js | 10 +- .../src/operations/bigDataPools.ts | 58 +++- .../src/operations/dataFlow.ts | 152 ++++++++--- .../src/operations/dataFlowDebugSession.ts | 160 ++++++++--- .../src/operations/dataset.ts | 152 ++++++++--- .../src/operations/integrationRuntimes.ts | 56 +++- .../src/operations/linkedService.ts | 152 ++++++++--- .../src/operations/notebook.ts | 206 +++++++++++---- 
.../src/operations/pipeline.ts | 179 ++++++++++--- .../src/operations/pipelineRun.ts | 110 ++++++-- .../src/operations/sparkJobDefinition.ts | 208 +++++++++++---- .../src/operations/sqlPools.ts | 58 +++- .../src/operations/sqlScript.ts | 160 ++++++++--- .../src/operations/trigger.ts | 248 ++++++++++++++---- .../src/operations/triggerRun.ts | 83 ++++-- .../src/operations/workspace.ts | 31 ++- .../operations/workspaceGitRepoManagement.ts | 29 +- sdk/synapse/synapse-artifacts/src/tracing.ts | 6 + .../README.md | 2 +- .../package.json | 2 + .../rollup.config.js | 10 +- .../src/operations/managedPrivateEndpoints.ts | 137 +++++++--- .../src/tracing.ts | 6 + sdk/synapse/synapse-monitoring/README.md | 2 +- sdk/synapse/synapse-monitoring/package.json | 2 + .../synapse-monitoring/rollup.config.js | 10 +- .../src/operations/monitoring.ts | 56 +++- sdk/synapse/synapse-monitoring/src/tracing.ts | 6 + sdk/synapse/synapse-spark/README.md | 2 +- sdk/synapse/synapse-spark/package.json | 2 + sdk/synapse/synapse-spark/rollup.config.js | 10 +- .../src/operations/sparkBatch.ts | 110 ++++++-- .../src/operations/sparkSession.ts | 245 +++++++++++++---- sdk/synapse/synapse-spark/src/tracing.ts | 6 + 41 files changed, 2283 insertions(+), 625 deletions(-) create mode 100644 sdk/synapse/synapse-access-control/src/tracing.ts create mode 100644 sdk/synapse/synapse-artifacts/src/tracing.ts create mode 100644 sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts create mode 100644 sdk/synapse/synapse-monitoring/src/tracing.ts create mode 100644 sdk/synapse/synapse-spark/src/tracing.ts diff --git a/sdk/synapse/synapse-access-control/README.md b/sdk/synapse/synapse-access-control/README.md index a3d12710f93b..e88208dad968 100644 --- a/sdk/synapse/synapse-access-control/README.md +++ b/sdk/synapse/synapse-access-control/README.md @@ -1,6 +1,6 @@ ## Azure Synapse Access Control client library for JavaScript -This package contains an isomorphic SDK for SparkClient. 
+This package contains an isomorphic SDK for Access Control. ## Getting started diff --git a/sdk/synapse/synapse-access-control/package.json b/sdk/synapse/synapse-access-control/package.json index e5d25ca26b2a..ea8e632a5442 100644 --- a/sdk/synapse/synapse-access-control/package.json +++ b/sdk/synapse/synapse-access-control/package.json @@ -24,7 +24,9 @@ "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", + "@rollup/plugin-commonjs": "11.0.2", "uglify-js": "^3.4.9", + "@opentelemetry/api": "^0.10.2", "@microsoft/api-extractor": "7.7.11" }, "homepage": "https://github.com/Azure/azure-sdk-for-js", diff --git a/sdk/synapse/synapse-access-control/rollup.config.js b/sdk/synapse/synapse-access-control/rollup.config.js index baa8b6898498..61ff3b304f30 100644 --- a/sdk/synapse/synapse-access-control/rollup.config.js +++ b/sdk/synapse/synapse-access-control/rollup.config.js @@ -1,6 +1,7 @@ import rollup from "rollup"; import nodeResolve from "rollup-plugin-node-resolve"; import sourcemaps from "rollup-plugin-sourcemaps"; +import cjs from "@rollup/plugin-commonjs"; /** * @type {rollup.RollupFileOptions} @@ -25,7 +26,14 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps()] + plugins: [nodeResolve({ module: true }), sourcemaps(), + cjs({ + namedExports: { + assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], + "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"] + } + }) + ] }; export default config; diff --git a/sdk/synapse/synapse-access-control/src/accessControlClient.ts b/sdk/synapse/synapse-access-control/src/accessControlClient.ts index 0a7ff0c8a2e3..0900ef061e6f 100644 --- a/sdk/synapse/synapse-access-control/src/accessControlClient.ts +++ b/sdk/synapse/synapse-access-control/src/accessControlClient.ts @@ -1,5 +1,7 @@ import * as coreHttp from "@azure/core-http"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "./tracing"; import * as Parameters from "./models/parameters"; import * as Mappers from "./models/mappers"; import { AccessControlClientContext } from "./accessControlClientContext"; @@ -79,16 +81,31 @@ export class AccessControlClient extends AccessControlClientContext { * List roles. * @param options The options parameters. 
*/ - private _getRoleDefinitions( + private async _getRoleDefinitions( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "AccessControlClient-_getRoleDefinitions", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.sendOperationRequest( - operationArguments, - getRoleDefinitionsOperationSpec - ) as Promise; + try { + const result = await this.sendOperationRequest( + operationArguments, + getRoleDefinitionsOperationSpec + ); + return result as AccessControlClientGetRoleDefinitionsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -96,18 +113,33 @@ export class AccessControlClient extends AccessControlClientContext { * @param roleId Synapse Built-In Role Id. * @param options The options parameters. 
*/ - getRoleDefinitionById( + async getRoleDefinitionById( roleId: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "AccessControlClient-getRoleDefinitionById", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { roleId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.sendOperationRequest( - operationArguments, - getRoleDefinitionByIdOperationSpec - ) as Promise; + try { + const result = await this.sendOperationRequest( + operationArguments, + getRoleDefinitionByIdOperationSpec + ); + return result as AccessControlClientGetRoleDefinitionByIdResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -115,34 +147,64 @@ export class AccessControlClient extends AccessControlClientContext { * @param createRoleAssignmentOptions Details of role id and object id. * @param options The options parameters. 
*/ - createRoleAssignment( + async createRoleAssignment( createRoleAssignmentOptions: RoleAssignmentOptions, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "AccessControlClient-createRoleAssignment", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { createRoleAssignmentOptions, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.sendOperationRequest( - operationArguments, - createRoleAssignmentOperationSpec - ) as Promise; + try { + const result = await this.sendOperationRequest( + operationArguments, + createRoleAssignmentOperationSpec + ); + return result as AccessControlClientCreateRoleAssignmentResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** * List role assignments. * @param options The options parameters. 
*/ - getRoleAssignments( + async getRoleAssignments( options?: AccessControlClientGetRoleAssignmentsOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "AccessControlClient-getRoleAssignments", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.sendOperationRequest( - operationArguments, - getRoleAssignmentsOperationSpec - ) as Promise; + try { + const result = await this.sendOperationRequest( + operationArguments, + getRoleAssignmentsOperationSpec + ); + return result as AccessControlClientGetRoleAssignmentsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -150,18 +212,33 @@ export class AccessControlClient extends AccessControlClientContext { * @param roleAssignmentId The ID of the role assignment. * @param options The options parameters. 
*/ - getRoleAssignmentById( + async getRoleAssignmentById( roleAssignmentId: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "AccessControlClient-getRoleAssignmentById", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { roleAssignmentId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.sendOperationRequest( - operationArguments, - getRoleAssignmentByIdOperationSpec - ) as Promise; + try { + const result = await this.sendOperationRequest( + operationArguments, + getRoleAssignmentByIdOperationSpec + ); + return result as AccessControlClientGetRoleAssignmentByIdResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -169,34 +246,64 @@ export class AccessControlClient extends AccessControlClientContext { * @param roleAssignmentId The ID of the role assignment. * @param options The options parameters. 
*/ - deleteRoleAssignmentById( + async deleteRoleAssignmentById( roleAssignmentId: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "AccessControlClient-deleteRoleAssignmentById", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { roleAssignmentId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.sendOperationRequest( - operationArguments, - deleteRoleAssignmentByIdOperationSpec - ) as Promise; + try { + const result = await this.sendOperationRequest( + operationArguments, + deleteRoleAssignmentByIdOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** * List role assignments of the caller. * @param options The options parameters. */ - getCallerRoleAssignments( + async getCallerRoleAssignments( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "AccessControlClient-getCallerRoleAssignments", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.sendOperationRequest( - operationArguments, - getCallerRoleAssignmentsOperationSpec - ) as Promise; + try { + const result = await this.sendOperationRequest( + operationArguments, + getCallerRoleAssignmentsOperationSpec + ); + return result as AccessControlClientGetCallerRoleAssignmentsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -204,18 +311,33 @@ export class AccessControlClient extends AccessControlClientContext { * 
@param nextLink The nextLink from the previous successful call to the GetRoleDefinitions method. * @param options The options parameters. */ - private _getRoleDefinitionsNext( + private async _getRoleDefinitionsNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "AccessControlClient-_getRoleDefinitionsNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.sendOperationRequest( - operationArguments, - getRoleDefinitionsNextOperationSpec - ) as Promise; + try { + const result = await this.sendOperationRequest( + operationArguments, + getRoleDefinitionsNextOperationSpec + ); + return result as AccessControlClientGetRoleDefinitionsNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-access-control/src/tracing.ts b/sdk/synapse/synapse-access-control/src/tracing.ts new file mode 100644 index 000000000000..164b82721674 --- /dev/null +++ b/sdk/synapse/synapse-access-control/src/tracing.ts @@ -0,0 +1,6 @@ +import { createSpanFunction } from "@azure/core-http"; + +export const createSpan = createSpanFunction({ + namespace: "Azure.Synapse.AccessControl", + packagePrefix: "Microsoft.Synapse" +}); diff --git a/sdk/synapse/synapse-artifacts/README.md b/sdk/synapse/synapse-artifacts/README.md index b72806a29b9a..601446dc8bb7 100644 --- a/sdk/synapse/synapse-artifacts/README.md +++ b/sdk/synapse/synapse-artifacts/README.md @@ -1,6 +1,6 @@ ## Azure Synapse Artifacts client library for JavaScript -This package contains an isomorphic SDK for SparkClient. +This package contains an isomorphic SDK for Artifacts. 
## Getting started diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index a0cef3cb4772..d367ec99840b 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -7,6 +7,7 @@ "@azure/core-lro": "^1.0.2", "@azure/core-paging": "^1.1.1", "@azure/core-http": "^1.2.0", + "@opentelemetry/api": "^0.10.2", "tslib": "^2.0.0" }, "keywords": [ @@ -25,6 +26,7 @@ "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", + "@rollup/plugin-commonjs": "11.0.2", "uglify-js": "^3.4.9", "@microsoft/api-extractor": "7.7.11" }, diff --git a/sdk/synapse/synapse-artifacts/rollup.config.js b/sdk/synapse/synapse-artifacts/rollup.config.js index 1cc609990f10..9dcdf6c57b63 100644 --- a/sdk/synapse/synapse-artifacts/rollup.config.js +++ b/sdk/synapse/synapse-artifacts/rollup.config.js @@ -1,6 +1,7 @@ import rollup from "rollup"; import nodeResolve from "rollup-plugin-node-resolve"; import sourcemaps from "rollup-plugin-sourcemaps"; +import cjs from "@rollup/plugin-commonjs"; /** * @type {rollup.RollupFileOptions} @@ -25,7 +26,14 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps()] + plugins: [nodeResolve({ module: true }), sourcemaps(), + cjs({ + namedExports: { + assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], + "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"] + } + }) + ] }; export default config; diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index 977767e2578f..e157acb1da39 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -22,14 +24,31 @@ export class BigDataPools { * List Big Data Pools * @param options The options parameters. */ - list(options?: coreHttp.OperationOptions): Promise { + async list( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-list", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ); + return result as BigDataPoolsListResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -37,18 +56,33 @@ export class BigDataPools { * @param bigDataPoolName The Big Data Pool name * @param options The options parameters. 
*/ - get( + async get( bigDataPoolName: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { bigDataPoolName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); + return result as BigDataPoolsGetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index 3dffca96470b..bb4f356ac3fc 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -85,18 +87,31 @@ export class DataFlow { dataFlow: DataFlowResource, options?: DataFlowCreateOrUpdateDataFlowOptionalParams ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateDataFlow", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { dataFlowName, dataFlow, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - 
return this.client.sendOperationRequest(args, spec) as Promise< - DataFlowCreateOrUpdateDataFlowResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as DataFlowCreateOrUpdateDataFlowResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -116,18 +131,33 @@ export class DataFlow { * @param dataFlowName The data flow name. * @param options The options parameters. */ - getDataFlow( + async getDataFlow( dataFlowName: string, options?: DataFlowGetDataFlowOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getDataFlow", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { dataFlowName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getDataFlowOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDataFlowOperationSpec + ); + return result as DataFlowGetDataFlowResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -139,17 +169,30 @@ export class DataFlow { dataFlowName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteDataFlow", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { dataFlowName, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return 
this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -175,18 +218,31 @@ export class DataFlow { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameDataFlow", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { dataFlowName, request, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -205,16 +261,31 @@ export class DataFlow { * Lists data flows. * @param options The options parameters. 
*/ - private _getDataFlowsByWorkspace( + private async _getDataFlowsByWorkspace( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getDataFlowsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getDataFlowsByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDataFlowsByWorkspaceOperationSpec + ); + return result as DataFlowGetDataFlowsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -223,18 +294,33 @@ export class DataFlow { * method. * @param options The options parameters. 
*/ - private _getDataFlowsByWorkspaceNext( + private async _getDataFlowsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getDataFlowsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getDataFlowsByWorkspaceNextOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDataFlowsByWorkspaceNextOperationSpec + ); + return result as DataFlowGetDataFlowsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index f0e17e80dafd..9cb3b7b4040d 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -89,17 +91,30 @@ export class DataFlowDebugSession { ): Promise< LROPoller > { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createDataFlowDebugSession", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { request, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const 
sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - DataFlowDebugSessionCreateDataFlowDebugSessionResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as DataFlowDebugSessionCreateDataFlowDebugSessionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -118,20 +133,33 @@ export class DataFlowDebugSession { * Query all active data flow debug sessions. * @param options The options parameters. */ - private _queryDataFlowDebugSessionsByWorkspace( + private async _queryDataFlowDebugSessionsByWorkspace( options?: coreHttp.OperationOptions ): Promise< DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse > { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - queryDataFlowDebugSessionsByWorkspaceOperationSpec - ) as Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse - >; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryDataFlowDebugSessionsByWorkspaceOperationSpec + ); + return result as DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -139,18 +167,33 @@ export class DataFlowDebugSession { * @param request Data flow debug session 
definition with debug content. * @param options The options parameters. */ - addDataFlow( + async addDataFlow( request: DataFlowDebugPackage, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-addDataFlow", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { request, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - addDataFlowOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + addDataFlowOperationSpec + ); + return result as DataFlowDebugSessionAddDataFlowResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -158,18 +201,33 @@ export class DataFlowDebugSession { * @param request Data flow debug session definition for deletion * @param options The options parameters. 
*/ - deleteDataFlowDebugSession( + async deleteDataFlowDebugSession( request: DeleteDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteDataFlowDebugSession", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { request, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - deleteDataFlowDebugSessionOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + deleteDataFlowDebugSessionOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -181,17 +239,30 @@ export class DataFlowDebugSession { request: DataFlowDebugCommandRequest, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-executeCommand", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { request, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - DataFlowDebugSessionExecuteCommandResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as DataFlowDebugSessionExecuteCommandResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -212,22 +283,35 @@ export 
class DataFlowDebugSession { * QueryDataFlowDebugSessionsByWorkspace method. * @param options The options parameters. */ - private _queryDataFlowDebugSessionsByWorkspaceNext( + private async _queryDataFlowDebugSessionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise< DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse > { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - queryDataFlowDebugSessionsByWorkspaceNextOperationSpec - ) as Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse - >; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryDataFlowDebugSessionsByWorkspaceNextOperationSpec + ); + return result as DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 0cef4925e7c0..498a8bf4700b 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -78,16 +80,31 @@ export class Dataset { * Lists datasets. 
* @param options The options parameters. */ - private _getDatasetsByWorkspace( + private async _getDatasetsByWorkspace( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getDatasetsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getDatasetsByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDatasetsByWorkspaceOperationSpec + ); + return result as DatasetGetDatasetsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -101,18 +118,31 @@ export class Dataset { dataset: DatasetResource, options?: DatasetCreateOrUpdateDatasetOptionalParams ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateDataset", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { datasetName, dataset, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - DatasetCreateOrUpdateDatasetResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as DatasetCreateOrUpdateDatasetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -132,18 +162,33 @@ 
export class Dataset { * @param datasetName The dataset name. * @param options The options parameters. */ - getDataset( + async getDataset( datasetName: string, options?: DatasetGetDatasetOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getDataset", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { datasetName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getDatasetOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDatasetOperationSpec + ); + return result as DatasetGetDatasetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -155,17 +200,30 @@ export class Dataset { datasetName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteDataset", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { datasetName, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -191,18 +249,31 @@ export class Dataset { request: ArtifactRenameRequest, 
options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameDataset", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { datasetName, request, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -222,18 +293,33 @@ export class Dataset { * @param nextLink The nextLink from the previous successful call to the GetDatasetsByWorkspace method. * @param options The options parameters. 
*/ - private _getDatasetsByWorkspaceNext( + private async _getDatasetsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getDatasetsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getDatasetsByWorkspaceNextOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDatasetsByWorkspaceNextOperationSpec + ); + return result as DatasetGetDatasetsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index 4736ccb3746a..37b3ca7bc218 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -25,16 +27,31 @@ export class IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. 
*/ - list( + async list( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-list", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ); + return result as IntegrationRuntimesListResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -42,18 +59,33 @@ export class IntegrationRuntimes { * @param integrationRuntimeName The Integration Runtime name * @param options The options parameters. */ - get( + async get( integrationRuntimeName: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { integrationRuntimeName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); + return result as IntegrationRuntimesGetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index 35436c92ba04..a95a73f8f4c2 100644 
--- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -80,16 +82,31 @@ export class LinkedService { * Lists linked services. * @param options The options parameters. */ - private _getLinkedServicesByWorkspace( + private async _getLinkedServicesByWorkspace( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getLinkedServicesByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getLinkedServicesByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getLinkedServicesByWorkspaceOperationSpec + ); + return result as LinkedServiceGetLinkedServicesByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -103,18 +120,31 @@ export class LinkedService { linkedService: LinkedServiceResource, options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateLinkedService", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { linkedServiceName, linkedService, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const 
sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - LinkedServiceCreateOrUpdateLinkedServiceResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as LinkedServiceCreateOrUpdateLinkedServiceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -134,18 +164,33 @@ export class LinkedService { * @param linkedServiceName The linked service name. * @param options The options parameters. */ - getLinkedService( + async getLinkedService( linkedServiceName: string, options?: LinkedServiceGetLinkedServiceOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getLinkedService", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { linkedServiceName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getLinkedServiceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getLinkedServiceOperationSpec + ); + return result as LinkedServiceGetLinkedServiceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -157,17 +202,30 @@ export class LinkedService { linkedServiceName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteLinkedService", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { linkedServiceName, - options: 
this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -193,18 +251,31 @@ export class LinkedService { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameLinkedService", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { linkedServiceName, request, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -225,18 +296,33 @@ export class LinkedService { * method. * @param options The options parameters. 
*/ - private _getLinkedServicesByWorkspaceNext( + private async _getLinkedServicesByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getLinkedServicesByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getLinkedServicesByWorkspaceNextOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getLinkedServicesByWorkspaceNextOperationSpec + ); + return result as LinkedServiceGetLinkedServicesByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index df003d827a91..e13a44e8ec7c 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -127,32 +129,62 @@ export class Notebook { * Lists Notebooks. * @param options The options parameters. 
*/ - private _getNotebooksByWorkspace( + private async _getNotebooksByWorkspace( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getNotebooksByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getNotebooksByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getNotebooksByWorkspaceOperationSpec + ); + return result as NotebookGetNotebooksByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** * Lists a summary of Notebooks. * @param options The options parameters. */ - private _getNotebookSummaryByWorkSpace( + private async _getNotebookSummaryByWorkSpace( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getNotebookSummaryByWorkSpace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getNotebookSummaryByWorkSpaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getNotebookSummaryByWorkSpaceOperationSpec + ); + return result as NotebookGetNotebookSummaryByWorkSpaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -166,18 +198,31 @@ export class Notebook { notebook: 
NotebookResource, options?: NotebookCreateOrUpdateNotebookOptionalParams ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateNotebook", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { notebookName, notebook, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - NotebookCreateOrUpdateNotebookResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as NotebookCreateOrUpdateNotebookResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -197,18 +242,33 @@ export class Notebook { * @param notebookName The notebook name. * @param options The options parameters. 
*/ - getNotebook( + async getNotebook( notebookName: string, options?: NotebookGetNotebookOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getNotebook", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { notebookName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getNotebookOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getNotebookOperationSpec + ); + return result as NotebookGetNotebookResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -220,17 +280,30 @@ export class Notebook { notebookName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteNotebook", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { notebookName, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -256,18 +329,31 @@ export class Notebook { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = 
createSpan( + "ArtifactsClient-renameNotebook", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { notebookName, request, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -288,18 +374,33 @@ export class Notebook { * method. * @param options The options parameters. */ - private _getNotebooksByWorkspaceNext( + private async _getNotebooksByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getNotebooksByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getNotebooksByWorkspaceNextOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getNotebooksByWorkspaceNextOperationSpec + ); + return result as NotebookGetNotebooksByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -308,18 +409,33 @@ export class Notebook { * method. * @param options The options parameters. 
*/ - private _getNotebookSummaryByWorkSpaceNext( + private async _getNotebookSummaryByWorkSpaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getNotebookSummaryByWorkSpaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getNotebookSummaryByWorkSpaceNextOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getNotebookSummaryByWorkSpaceNextOperationSpec + ); + return result as NotebookGetNotebookSummaryByWorkSpaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index 87c49bd6c9ae..2ecf0f6b7150 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -80,16 +82,31 @@ export class Pipeline { * Lists pipelines. * @param options The options parameters. 
*/ - private _getPipelinesByWorkspace( + private async _getPipelinesByWorkspace( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getPipelinesByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getPipelinesByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getPipelinesByWorkspaceOperationSpec + ); + return result as PipelineGetPipelinesByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -103,18 +120,31 @@ export class Pipeline { pipeline: PipelineResource, options?: PipelineCreateOrUpdatePipelineOptionalParams ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdatePipeline", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, pipeline, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - PipelineCreateOrUpdatePipelineResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as PipelineCreateOrUpdatePipelineResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -134,18 +164,33 @@ export class Pipeline { * 
@param pipelineName The pipeline name. * @param options The options parameters. */ - getPipeline( + async getPipeline( pipelineName: string, options?: PipelineGetPipelineOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getPipeline", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getPipelineOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getPipelineOperationSpec + ); + return result as PipelineGetPipelineResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -157,17 +202,30 @@ export class Pipeline { pipelineName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deletePipeline", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -193,18 +251,31 @@ export class Pipeline { request: ArtifactRenameRequest, options?: 
coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renamePipeline", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, request, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -224,18 +295,33 @@ export class Pipeline { * @param pipelineName The pipeline name. * @param options The options parameters. */ - createPipelineRun( + async createPipelineRun( pipelineName: string, options?: PipelineCreatePipelineRunOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createPipelineRun", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - createPipelineRunOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createPipelineRunOperationSpec + ); + return result as PipelineCreatePipelineRunResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -244,18 +330,33 @@ export class Pipeline { * method. 
* @param options The options parameters. */ - private _getPipelinesByWorkspaceNext( + private async _getPipelinesByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getPipelinesByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getPipelinesByWorkspaceNextOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getPipelinesByWorkspaceNextOperationSpec + ); + return result as PipelineGetPipelinesByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index 9c2c3370e5cc..b93781d2b254 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -29,18 +31,33 @@ export class PipelineRun { * @param filterParameters Parameters to filter the pipeline run. * @param options The options parameters. 
*/ - queryPipelineRunsByWorkspace( + async queryPipelineRunsByWorkspace( filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-queryPipelineRunsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { filterParameters, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - queryPipelineRunsByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryPipelineRunsByWorkspaceOperationSpec + ); + return result as PipelineRunQueryPipelineRunsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -48,18 +65,33 @@ export class PipelineRun { * @param runId The pipeline run identifier. * @param options The options parameters. 
*/ - getPipelineRun( + async getPipelineRun( runId: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getPipelineRun", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { runId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getPipelineRunOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getPipelineRunOperationSpec + ); + return result as PipelineRunGetPipelineRunResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -69,22 +101,37 @@ export class PipelineRun { * @param filterParameters Parameters to filter the activity runs. * @param options The options parameters. 
*/ - queryActivityRuns( + async queryActivityRuns( pipelineName: string, runId: string, filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-queryActivityRuns", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { pipelineName, runId, filterParameters, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - queryActivityRunsOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryActivityRunsOperationSpec + ); + return result as PipelineRunQueryActivityRunsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -92,18 +139,33 @@ export class PipelineRun { * @param runId The pipeline run identifier. * @param options The options parameters. 
*/ - cancelPipelineRun( + async cancelPipelineRun( runId: string, options?: PipelineRunCancelPipelineRunOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-cancelPipelineRun", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { runId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - cancelPipelineRunOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + cancelPipelineRunOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index 0604bdeeaf1b..88a8c6ebde47 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -82,16 +84,31 @@ export class SparkJobDefinition { * Lists spark job definitions. * @param options The options parameters. 
*/ - private _getSparkJobDefinitionsByWorkspace( + private async _getSparkJobDefinitionsByWorkspace( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getSparkJobDefinitionsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkJobDefinitionsByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkJobDefinitionsByWorkspaceOperationSpec + ); + return result as SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -100,20 +117,35 @@ export class SparkJobDefinition { * @param sparkJobDefinition Spark Job Definition resource definition. * @param options The options parameters. 
*/ - createOrUpdateSparkJobDefinition( + async createOrUpdateSparkJobDefinition( sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateSparkJobDefinition", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, sparkJobDefinition, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - createOrUpdateSparkJobDefinitionOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createOrUpdateSparkJobDefinitionOperationSpec + ); + return result as SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -121,18 +153,33 @@ export class SparkJobDefinition { * @param sparkJobDefinitionName The spark job definition name. * @param options The options parameters. 
*/ - getSparkJobDefinition( + async getSparkJobDefinition( sparkJobDefinitionName: string, options?: SparkJobDefinitionGetSparkJobDefinitionOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getSparkJobDefinition", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkJobDefinitionOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkJobDefinitionOperationSpec + ); + return result as SparkJobDefinitionGetSparkJobDefinitionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -140,18 +187,33 @@ export class SparkJobDefinition { * @param sparkJobDefinitionName The spark job definition name. * @param options The options parameters. 
*/ - deleteSparkJobDefinition( + async deleteSparkJobDefinition( sparkJobDefinitionName: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteSparkJobDefinition", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - deleteSparkJobDefinitionOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + deleteSparkJobDefinitionOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -163,17 +225,30 @@ export class SparkJobDefinition { sparkJobDefinitionName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-executeSparkJobDefinition", + this.getOperationOptions(options, "location") + ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, - options: this.getOperationOptions(options, "location") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - SparkJobDefinitionExecuteSparkJobDefinitionResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as SparkJobDefinitionExecuteSparkJobDefinitionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( 
@@ -200,18 +275,31 @@ export class SparkJobDefinition { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameSparkJobDefinition", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionName, request, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -235,17 +323,30 @@ export class SparkJobDefinition { sparkJobDefinitionAzureResource: SparkJobDefinitionResource, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-debugSparkJobDefinition", + this.getOperationOptions(options, "location") + ); const operationArguments: coreHttp.OperationArguments = { sparkJobDefinitionAzureResource, - options: this.getOperationOptions(options, "location") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - SparkJobDefinitionDebugSparkJobDefinitionResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as SparkJobDefinitionDebugSparkJobDefinitionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + 
throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -267,20 +368,33 @@ export class SparkJobDefinition { * GetSparkJobDefinitionsByWorkspace method. * @param options The options parameters. */ - private _getSparkJobDefinitionsByWorkspaceNext( + private async _getSparkJobDefinitionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getSparkJobDefinitionsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkJobDefinitionsByWorkspaceNextOperationSpec - ) as Promise< - SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse - >; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkJobDefinitionsByWorkspaceNextOperationSpec + ); + return result as SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index dc58292e2873..45d709eebc42 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -22,14 +24,31 @@ export class SqlPools { * List Sql Pools * @param 
options The options parameters. */ - list(options?: coreHttp.OperationOptions): Promise { + async list( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-list", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ); + return result as SqlPoolsListResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -37,18 +56,33 @@ export class SqlPools { * @param sqlPoolName The Sql Pool name * @param options The options parameters. */ - get( + async get( sqlPoolName: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sqlPoolName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); + return result as SqlPoolsGetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index d3e08c1f3ea1..94bba0c3edbf 100644 --- 
a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -78,16 +80,31 @@ export class SqlScript { * Lists sql scripts. * @param options The options parameters. */ - private _getSqlScriptsByWorkspace( + private async _getSqlScriptsByWorkspace( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getSqlScriptsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSqlScriptsByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSqlScriptsByWorkspaceOperationSpec + ); + return result as SqlScriptGetSqlScriptsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -96,20 +113,35 @@ export class SqlScript { * @param sqlScript Sql Script resource definition. * @param options The options parameters. 
*/ - createOrUpdateSqlScript( + async createOrUpdateSqlScript( sqlScriptName: string, sqlScript: SqlScriptResource, options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateSqlScript", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, sqlScript, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - createOrUpdateSqlScriptOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createOrUpdateSqlScriptOperationSpec + ); + return result as SqlScriptCreateOrUpdateSqlScriptResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -117,18 +149,33 @@ export class SqlScript { * @param sqlScriptName The sql script name. * @param options The options parameters. 
*/ - getSqlScript( + async getSqlScript( sqlScriptName: string, options?: SqlScriptGetSqlScriptOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getSqlScript", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSqlScriptOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSqlScriptOperationSpec + ); + return result as SqlScriptGetSqlScriptResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -136,18 +183,33 @@ export class SqlScript { * @param sqlScriptName The sql script name. * @param options The options parameters. 
*/ - deleteSqlScript( + async deleteSqlScript( sqlScriptName: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteSqlScript", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - deleteSqlScriptOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + deleteSqlScriptOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -161,18 +223,31 @@ export class SqlScript { request: ArtifactRenameRequest, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameSqlScript", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { sqlScriptName, request, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -193,18 +268,33 @@ export class SqlScript { * method. * @param options The options parameters. 
*/ - private _getSqlScriptsByWorkspaceNext( + private async _getSqlScriptsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getSqlScriptsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSqlScriptsByWorkspaceNextOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSqlScriptsByWorkspaceNextOperationSpec + ); + return result as SqlScriptGetSqlScriptsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index 4f333b059259..bcc0173f84a1 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -80,16 +82,31 @@ export class Trigger { * Lists triggers. * @param options The options parameters. 
*/ - private _getTriggersByWorkspace( + private async _getTriggersByWorkspace( options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getTriggersByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getTriggersByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getTriggersByWorkspaceOperationSpec + ); + return result as TriggerGetTriggersByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -103,18 +120,31 @@ export class Trigger { trigger: TriggerResource, options?: TriggerCreateOrUpdateTriggerOptionalParams ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateTrigger", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { triggerName, trigger, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - TriggerCreateOrUpdateTriggerResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as TriggerCreateOrUpdateTriggerResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -134,18 +164,33 @@ export class Trigger { * @param triggerName 
The trigger name. * @param options The options parameters. */ - getTrigger( + async getTrigger( triggerName: string, options?: TriggerGetTriggerOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getTrigger", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getTriggerOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getTriggerOperationSpec + ); + return result as TriggerGetTriggerResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -157,17 +202,30 @@ export class Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteTrigger", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -191,17 +249,30 @@ export class Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { + const { 
span, updatedOptions } = createSpan( + "ArtifactsClient-subscribeTriggerToEvents", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - TriggerSubscribeTriggerToEventsResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as TriggerSubscribeTriggerToEventsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -221,18 +292,33 @@ export class Trigger { * @param triggerName The trigger name. * @param options The options parameters. 
*/ - getEventSubscriptionStatus( + async getEventSubscriptionStatus( triggerName: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getEventSubscriptionStatus", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getEventSubscriptionStatusOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getEventSubscriptionStatusOperationSpec + ); + return result as TriggerGetEventSubscriptionStatusResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -244,17 +330,30 @@ export class Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-unsubscribeTriggerFromEvents", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - TriggerUnsubscribeTriggerFromEventsResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as TriggerUnsubscribeTriggerFromEventsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -278,17 +377,30 @@ export class 
Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-startTrigger", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -312,17 +424,30 @@ export class Trigger { triggerName: string, options?: coreHttp.OperationOptions ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-stopTrigger", + this.getOperationOptions(options, "undefined") + ); const operationArguments: coreHttp.OperationArguments = { triggerName, - options: this.getOperationOptions(options, "undefined") + options: updatedOptions }; - const sendOperation = ( + const sendOperation = async ( args: coreHttp.OperationArguments, spec: coreHttp.OperationSpec ) => { - return this.client.sendOperationRequest(args, spec) as Promise< - coreHttp.RestResponse - >; + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } }; const initialOperationResult = await sendOperation( @@ -342,18 +467,33 @@ export class Trigger { * @param nextLink The nextLink from the previous successful call to the 
GetTriggersByWorkspace method. * @param options The options parameters. */ - private _getTriggersByWorkspaceNext( + private async _getTriggersByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getTriggersByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getTriggersByWorkspaceNextOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getTriggersByWorkspaceNextOperationSpec + ); + return result as TriggerGetTriggersByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } private getOperationOptions( diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index 20e37cfac269..739820f2f954 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -27,20 +29,35 @@ export class TriggerRun { * @param runId The pipeline run identifier. * @param options The options parameters. 
*/ - rerunTriggerInstance( + async rerunTriggerInstance( triggerName: string, runId: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-rerunTriggerInstance", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { triggerName, runId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - rerunTriggerInstanceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + rerunTriggerInstanceOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -49,20 +66,35 @@ export class TriggerRun { * @param runId The pipeline run identifier. * @param options The options parameters. 
*/ - cancelTriggerInstance( + async cancelTriggerInstance( triggerName: string, runId: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-cancelTriggerInstance", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { triggerName, runId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - cancelTriggerInstanceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + cancelTriggerInstanceOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -70,18 +102,33 @@ export class TriggerRun { * @param filterParameters Parameters to filter the pipeline run. * @param options The options parameters. 
*/ - queryTriggerRunsByWorkspace( + async queryTriggerRunsByWorkspace( filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-queryTriggerRunsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { filterParameters, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - queryTriggerRunsByWorkspaceOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryTriggerRunsByWorkspaceOperationSpec + ); + return result as TriggerRunQueryTriggerRunsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index 4c4453399fb4..43c4f1d36a87 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -22,14 +24,31 @@ export class Workspace { * Get Workspace * @param options The options parameters. 
*/ - get(options?: coreHttp.OperationOptions): Promise { + async get( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); + return result as WorkspaceGetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 058888c80532..5564aca2fdd1 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -27,18 +29,33 @@ export class WorkspaceGitRepoManagement { * @param gitHubAccessTokenRequest * @param options The options parameters. 
*/ - getGitHubAccessToken( + async getGitHubAccessToken( gitHubAccessTokenRequest: GitHubAccessTokenRequest, options?: WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getGitHubAccessToken", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { gitHubAccessTokenRequest, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getGitHubAccessTokenOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getGitHubAccessTokenOperationSpec + ); + return result as WorkspaceGitRepoManagementGetGitHubAccessTokenResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-artifacts/src/tracing.ts b/sdk/synapse/synapse-artifacts/src/tracing.ts new file mode 100644 index 000000000000..a72edabac525 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/tracing.ts @@ -0,0 +1,6 @@ +import { createSpanFunction } from "@azure/core-http"; + +export const createSpan = createSpanFunction({ + namespace: "Azure.Synapse.Artifacts", + packagePrefix: "Microsoft.Synapse" +}); diff --git a/sdk/synapse/synapse-managed-private-endpoints/README.md b/sdk/synapse/synapse-managed-private-endpoints/README.md index 6c0534ddb257..eee1a7db719b 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/README.md +++ b/sdk/synapse/synapse-managed-private-endpoints/README.md @@ -1,6 +1,6 @@ ## Azure Synapse Managed Private Endpoints client library for JavaScript -This package contains an isomorphic SDK for SparkClient. +This package contains an isomorphic SDK for Managed Private Endpoints. 
## Getting started diff --git a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json index a8669a15fec7..7908df34825a 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -6,6 +6,7 @@ "dependencies": { "@azure/core-paging": "^1.1.1", "@azure/core-http": "^1.2.0", + "@opentelemetry/api": "^0.10.2", "tslib": "^2.0.0" }, "keywords": [ @@ -24,6 +25,7 @@ "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", + "@rollup/plugin-commonjs": "11.0.2", "uglify-js": "^3.4.9", "@microsoft/api-extractor": "7.7.11" }, diff --git a/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js index bcc4c1c74437..d7c4da5c6839 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js +++ b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js @@ -1,6 +1,7 @@ import rollup from "rollup"; import nodeResolve from "rollup-plugin-node-resolve"; import sourcemaps from "rollup-plugin-sourcemaps"; +import cjs from "@rollup/plugin-commonjs"; /** * @type {rollup.RollupFileOptions} @@ -25,7 +26,14 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps()] + plugins: [nodeResolve({ module: true }), sourcemaps(), + cjs({ + namedExports: { + assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], + "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"] + } + }) + ] }; export default config; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts index d354c10f3bf2..ee17cbb59aea 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; @@ -84,20 +86,35 @@ export class ManagedPrivateEndpoints { * @param managedPrivateEndpointName Managed private endpoint name * @param options The options parameters. 
*/ - get( + async get( managedVirtualNetworkName: string, managedPrivateEndpointName: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-get", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { managedVirtualNetworkName, managedPrivateEndpointName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getOperationSpec + ); + return result as ManagedPrivateEndpointsGetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -107,22 +124,37 @@ export class ManagedPrivateEndpoints { * @param managedPrivateEndpoint Managed private endpoint properties. * @param options The options parameters. 
*/ - create( + async create( managedVirtualNetworkName: string, managedPrivateEndpointName: string, managedPrivateEndpoint: ManagedPrivateEndpoint, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-create", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { managedVirtualNetworkName, managedPrivateEndpointName, managedPrivateEndpoint, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - createOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createOperationSpec + ); + return result as ManagedPrivateEndpointsCreateResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -131,20 +163,35 @@ export class ManagedPrivateEndpoints { * @param managedPrivateEndpointName Managed private endpoint name * @param options The options parameters. 
*/ - delete( + async delete( managedVirtualNetworkName: string, managedPrivateEndpointName: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-delete", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { managedVirtualNetworkName, managedPrivateEndpointName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - deleteOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + deleteOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -152,18 +199,33 @@ export class ManagedPrivateEndpoints { * @param managedVirtualNetworkName Managed virtual network name * @param options The options parameters. 
*/ - private _list( + private async _list( managedVirtualNetworkName: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-_list", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { managedVirtualNetworkName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + listOperationSpec + ); + return result as ManagedPrivateEndpointsListResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -172,20 +234,35 @@ export class ManagedPrivateEndpoints { * @param nextLink The nextLink from the previous successful call to the List method. * @param options The options parameters. 
*/ - private _listNext( + private async _listNext( managedVirtualNetworkName: string, nextLink: string, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-_listNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { managedVirtualNetworkName, nextLink, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - listNextOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + listNextOperationSpec + ); + return result as ManagedPrivateEndpointsListNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts b/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts new file mode 100644 index 000000000000..9820227dc43c --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts @@ -0,0 +1,6 @@ +import { createSpanFunction } from "@azure/core-http"; + +export const createSpan = createSpanFunction({ + namespace: "Azure.Synapse.ManagedPrivateEndpoints", + packagePrefix: "Microsoft.Synapse" +}); diff --git a/sdk/synapse/synapse-monitoring/README.md b/sdk/synapse/synapse-monitoring/README.md index 6be5c613204f..e24af2b09d58 100644 --- a/sdk/synapse/synapse-monitoring/README.md +++ b/sdk/synapse/synapse-monitoring/README.md @@ -1,6 +1,6 @@ ## Azure Synapse Monitoring client library for JavaScript -This package contains an isomorphic SDK for SparkClient. +This package contains an isomorphic SDK for Monitoring. 
## Getting started diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index fa201990d0b1..4d87bda95ce9 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -5,6 +5,7 @@ "version": "1.0.0-beta.1", "dependencies": { "@azure/core-http": "^1.2.0", + "@opentelemetry/api": "^0.10.2", "tslib": "^2.0.0" }, "keywords": [ @@ -23,6 +24,7 @@ "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", + "@rollup/plugin-commonjs": "11.0.2", "uglify-js": "^3.4.9", "@microsoft/api-extractor": "7.7.11" }, diff --git a/sdk/synapse/synapse-monitoring/rollup.config.js b/sdk/synapse/synapse-monitoring/rollup.config.js index cc118fad214c..c39e8ad35e9a 100644 --- a/sdk/synapse/synapse-monitoring/rollup.config.js +++ b/sdk/synapse/synapse-monitoring/rollup.config.js @@ -1,6 +1,7 @@ import rollup from "rollup"; import nodeResolve from "rollup-plugin-node-resolve"; import sourcemaps from "rollup-plugin-sourcemaps"; +import cjs from "@rollup/plugin-commonjs"; /** * @type {rollup.RollupFileOptions} @@ -25,7 +26,14 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps()] + plugins: [nodeResolve({ module: true }), sourcemaps(), + cjs({ + namedExports: { + assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], + "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"] + } + }) + ] }; export default config; diff --git a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts index 53f2223463d5..16641c10119a 100644 --- a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts +++ b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -27,32 +29,62 @@ export class Monitoring { * Get list of spark applications for the workspace. * @param options The options parameters. */ - getSparkJobList( + async getSparkJobList( options?: MonitoringGetSparkJobListOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "MonitoringClient-getSparkJobList", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkJobListOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkJobListOperationSpec + ); + return result as MonitoringGetSparkJobListResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** * Get SQL OD/DW Query for the workspace. * @param options The options parameters. 
*/ - getSqlJobQueryString( + async getSqlJobQueryString( options?: MonitoringGetSqlJobQueryStringOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "MonitoringClient-getSqlJobQueryString", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSqlJobQueryStringOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSqlJobQueryStringOperationSpec + ); + return result as MonitoringGetSqlJobQueryStringResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-monitoring/src/tracing.ts b/sdk/synapse/synapse-monitoring/src/tracing.ts new file mode 100644 index 000000000000..17451e9c02ae --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/tracing.ts @@ -0,0 +1,6 @@ +import { createSpanFunction } from "@azure/core-http"; + +export const createSpan = createSpanFunction({ + namespace: "Azure.Synapse.Monitoring", + packagePrefix: "Microsoft.Synapse" +}); diff --git a/sdk/synapse/synapse-spark/README.md b/sdk/synapse/synapse-spark/README.md index 5fa1a44ea839..79816be8b0ef 100644 --- a/sdk/synapse/synapse-spark/README.md +++ b/sdk/synapse/synapse-spark/README.md @@ -1,6 +1,6 @@ ## Azure Synapse Spark client library for JavaScript -This package contains an isomorphic SDK for SparkClient. +This package contains an isomorphic SDK for Spark. 
## Getting started diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index 18c3c135c0d2..1177b257aa0f 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -5,6 +5,7 @@ "version": "1.0.0-beta.1", "dependencies": { "@azure/core-http": "^1.2.0", + "@opentelemetry/api": "^0.10.2", "tslib": "^2.0.0" }, "keywords": [ @@ -23,6 +24,7 @@ "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", + "@rollup/plugin-commonjs": "11.0.2", "uglify-js": "^3.4.9", "@microsoft/api-extractor": "7.7.11" }, diff --git a/sdk/synapse/synapse-spark/rollup.config.js b/sdk/synapse/synapse-spark/rollup.config.js index ed9214dab22b..c497548aa119 100644 --- a/sdk/synapse/synapse-spark/rollup.config.js +++ b/sdk/synapse/synapse-spark/rollup.config.js @@ -1,6 +1,7 @@ import rollup from "rollup"; import nodeResolve from "rollup-plugin-node-resolve"; import sourcemaps from "rollup-plugin-sourcemaps"; +import cjs from "@rollup/plugin-commonjs"; /** * @type {rollup.RollupFileOptions} @@ -25,7 +26,14 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps()] + plugins: [nodeResolve({ module: true }), sourcemaps(), + cjs({ + namedExports: { + assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], + "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"] + } + }) + ] }; export default config; diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts index d9d36b0a8b52..15d7daba57b2 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -30,16 +32,31 @@ export class SparkBatch { * List all spark batch jobs which are running under a particular spark pool. * @param options The options parameters. 
*/ - getSparkBatchJobs( + async getSparkBatchJobs( options?: SparkBatchGetSparkBatchJobsOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-getSparkBatchJobs", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkBatchJobsOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkBatchJobsOperationSpec + ); + return result as SparkBatchGetSparkBatchJobsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -47,18 +64,33 @@ export class SparkBatch { * @param sparkBatchJobOptions Livy compatible batch job request payload. * @param options The options parameters. 
*/ - createSparkBatchJob( + async createSparkBatchJob( sparkBatchJobOptions: SparkBatchJobOptions, options?: SparkBatchCreateSparkBatchJobOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-createSparkBatchJob", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sparkBatchJobOptions, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - createSparkBatchJobOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createSparkBatchJobOperationSpec + ); + return result as SparkBatchCreateSparkBatchJobResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -66,18 +98,33 @@ export class SparkBatch { * @param batchId Identifier for the batch job. * @param options The options parameters. 
*/ - getSparkBatchJob( + async getSparkBatchJob( batchId: number, options?: SparkBatchGetSparkBatchJobOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-getSparkBatchJob", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { batchId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkBatchJobOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkBatchJobOperationSpec + ); + return result as SparkBatchGetSparkBatchJobResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -85,18 +132,33 @@ export class SparkBatch { * @param batchId Identifier for the batch job. * @param options The options parameters. 
*/ - cancelSparkBatchJob( + async cancelSparkBatchJob( batchId: number, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-cancelSparkBatchJob", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { batchId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - cancelSparkBatchJobOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + cancelSparkBatchJobOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts index 844d4d4331ac..f475d080c114 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts @@ -1,3 +1,5 @@ +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; @@ -35,16 +37,31 @@ export class SparkSession { * List all spark sessions which are running under a particular spark pool. * @param options The options parameters. 
*/ - getSparkSessions( + async getSparkSessions( options?: SparkSessionGetSparkSessionsOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-getSparkSessions", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkSessionsOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkSessionsOperationSpec + ); + return result as SparkSessionGetSparkSessionsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -52,18 +69,33 @@ export class SparkSession { * @param sparkSessionOptions Livy compatible batch job request payload. * @param options The options parameters. 
*/ - createSparkSession( + async createSparkSession( sparkSessionOptions: SparkSessionOptions, options?: SparkSessionCreateSparkSessionOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-createSparkSession", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sparkSessionOptions, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - createSparkSessionOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createSparkSessionOperationSpec + ); + return result as SparkSessionCreateSparkSessionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -71,18 +103,33 @@ export class SparkSession { * @param sessionId Identifier for the session. * @param options The options parameters. 
*/ - getSparkSession( + async getSparkSession( sessionId: number, options?: SparkSessionGetSparkSessionOptionalParams ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-getSparkSession", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sessionId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkSessionOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkSessionOperationSpec + ); + return result as SparkSessionGetSparkSessionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -90,18 +137,33 @@ export class SparkSession { * @param sessionId Identifier for the session. * @param options The options parameters. 
*/ - cancelSparkSession( + async cancelSparkSession( sessionId: number, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-cancelSparkSession", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sessionId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - cancelSparkSessionOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + cancelSparkSessionOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -109,18 +171,33 @@ export class SparkSession { * @param sessionId Identifier for the session. * @param options The options parameters. 
*/ - resetSparkSessionTimeout( + async resetSparkSessionTimeout( sessionId: number, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-resetSparkSessionTimeout", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sessionId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - resetSparkSessionTimeoutOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + resetSparkSessionTimeoutOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -128,18 +205,33 @@ export class SparkSession { * @param sessionId Identifier for the session. * @param options The options parameters. 
*/ - getSparkStatements( + async getSparkStatements( sessionId: number, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-getSparkStatements", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sessionId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkStatementsOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkStatementsOperationSpec + ); + return result as SparkSessionGetSparkStatementsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -148,20 +240,35 @@ export class SparkSession { * @param sparkStatementOptions Livy compatible batch job request payload. * @param options The options parameters. 
*/ - createSparkStatement( + async createSparkStatement( sessionId: number, sparkStatementOptions: SparkStatementOptions, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-createSparkStatement", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sessionId, sparkStatementOptions, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - createSparkStatementOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createSparkStatementOperationSpec + ); + return result as SparkSessionCreateSparkStatementResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -170,20 +277,35 @@ export class SparkSession { * @param statementId Identifier for the statement. * @param options The options parameters. 
*/ - getSparkStatement( + async getSparkStatement( sessionId: number, statementId: number, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-getSparkStatement", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sessionId, statementId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - getSparkStatementOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkStatementOperationSpec + ); + return result as SparkSessionGetSparkStatementResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } /** @@ -192,20 +314,35 @@ export class SparkSession { * @param statementId Identifier for the statement. * @param options The options parameters. 
*/ - cancelSparkStatement( + async cancelSparkStatement( sessionId: number, statementId: number, options?: coreHttp.OperationOptions ): Promise { + const { span, updatedOptions } = createSpan( + "SparkClient-cancelSparkStatement", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); const operationArguments: coreHttp.OperationArguments = { sessionId, statementId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: updatedOptions }; - return this.client.sendOperationRequest( - operationArguments, - cancelSparkStatementOperationSpec - ) as Promise; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + cancelSparkStatementOperationSpec + ); + return result as SparkSessionCancelSparkStatementResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } } } // Operation Specifications diff --git a/sdk/synapse/synapse-spark/src/tracing.ts b/sdk/synapse/synapse-spark/src/tracing.ts new file mode 100644 index 000000000000..bb74ae42133b --- /dev/null +++ b/sdk/synapse/synapse-spark/src/tracing.ts @@ -0,0 +1,6 @@ +import { createSpanFunction } from "@azure/core-http"; + +export const createSpan = createSpanFunction({ + namespace: "Azure.Synapse.Spark", + packagePrefix: "Microsoft.Synapse" +}); From 4b7b505cc17c87e37daa73914a19e4b64a39d91a Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 9 Dec 2020 10:24:00 +1300 Subject: [PATCH 18/28] New regeneration with tracing --- sdk/synapse/synapse-access-control/README.md | 7 +++-- .../synapse-access-control/rollup.config.js | 4 ++- .../src/accessControlClient.ts | 6 +--- sdk/synapse/synapse-artifacts/README.md | 3 +- .../synapse-artifacts/rollup.config.js | 4 ++- .../src/operations/bigDataPools.ts | 14 ++------- .../src/operations/dataFlow.ts | 11 ++----- .../src/operations/dataFlowDebugSession.ts | 21 ++++--------- .../src/operations/dataset.ts | 11 
++----- .../src/operations/integrationRuntimes.ts | 19 +++--------- .../src/operations/linkedService.ts | 15 ++-------- .../src/operations/notebook.ts | 20 +++---------- .../src/operations/pipeline.ts | 11 ++----- .../src/operations/pipelineRun.ts | 6 +--- .../src/operations/sparkJobDefinition.ts | 15 ++-------- .../src/operations/sqlPools.ts | 14 ++------- .../src/operations/sqlScript.ts | 11 ++----- .../src/operations/trigger.ts | 11 ++----- .../src/operations/triggerRun.ts | 17 ++--------- .../src/operations/workspace.ts | 9 ++---- .../operations/workspaceGitRepoManagement.ts | 6 +--- .../README.md | 8 +++-- .../rollup.config.js | 4 ++- .../src/operations/managedPrivateEndpoints.ts | 30 ++++--------------- sdk/synapse/synapse-monitoring/README.md | 1 + .../synapse-monitoring/rollup.config.js | 4 ++- .../src/operations/monitoring.ts | 7 +---- sdk/synapse/synapse-spark/rollup.config.js | 4 ++- .../src/operations/sparkBatch.ts | 12 ++------ .../src/operations/sparkSession.ts | 12 ++------ 30 files changed, 83 insertions(+), 234 deletions(-) diff --git a/sdk/synapse/synapse-access-control/README.md b/sdk/synapse/synapse-access-control/README.md index e88208dad968..f7882010373f 100644 --- a/sdk/synapse/synapse-access-control/README.md +++ b/sdk/synapse/synapse-access-control/README.md @@ -26,11 +26,14 @@ import { DefaultAzureCredential } from "@azure/identity"; export async function main(): Promise { const credential = new DefaultAzureCredential(); - let client = new AccessControlClient(credential, "https://joturnersynapsetest.dev.azuresynapse.net"); + let client = new AccessControlClient( + credential, + "https://joturnersynapsetest.dev.azuresynapse.net" + ); let list = await client.listRoleDefinitions(); for await (let item of list) { console.log("item:", item); - } + } } ``` diff --git a/sdk/synapse/synapse-access-control/rollup.config.js b/sdk/synapse/synapse-access-control/rollup.config.js index 61ff3b304f30..a5d620e63cc1 100644 --- 
a/sdk/synapse/synapse-access-control/rollup.config.js +++ b/sdk/synapse/synapse-access-control/rollup.config.js @@ -26,7 +26,9 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps(), + plugins: [ + nodeResolve({ module: true }), + sourcemaps(), cjs({ namedExports: { assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], diff --git a/sdk/synapse/synapse-access-control/src/accessControlClient.ts b/sdk/synapse/synapse-access-control/src/accessControlClient.ts index 0900ef061e6f..fafbf353df58 100644 --- a/sdk/synapse/synapse-access-control/src/accessControlClient.ts +++ b/sdk/synapse/synapse-access-control/src/accessControlClient.ts @@ -413,11 +413,7 @@ const getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.ErrorContract } }, - queryParameters: [ - Parameters.apiVersion, - Parameters.roleId1, - Parameters.principalId - ], + queryParameters: [Parameters.apiVersion, Parameters.roleId1, Parameters.principalId], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.continuationToken], serializer diff --git a/sdk/synapse/synapse-artifacts/README.md b/sdk/synapse/synapse-artifacts/README.md index 601446dc8bb7..1301c7c4ceea 100644 --- a/sdk/synapse/synapse-artifacts/README.md +++ b/sdk/synapse/synapse-artifacts/README.md @@ -30,9 +30,10 @@ export async function main(): Promise { let list = await client.pipeline.listPipelinesByWorkspace(); for await (let item of list) { console.log("item:", item); - } + } } ``` + ## Related projects - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) diff --git a/sdk/synapse/synapse-artifacts/rollup.config.js b/sdk/synapse/synapse-artifacts/rollup.config.js index 9dcdf6c57b63..f18ce9d35045 100644 --- a/sdk/synapse/synapse-artifacts/rollup.config.js +++ b/sdk/synapse/synapse-artifacts/rollup.config.js @@ 
-26,7 +26,9 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps(), + plugins: [ + nodeResolve({ module: true }), + sourcemaps(), cjs({ namedExports: { assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index e157acb1da39..afddce7a118c 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -24,9 +24,7 @@ export class BigDataPools { * List Big Data Pools * @param options The options parameters. */ - async list( - options?: coreHttp.OperationOptions - ): Promise { + async list(options?: coreHttp.OperationOptions): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-list", coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -35,10 +33,7 @@ export class BigDataPools { options: updatedOptions }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); return result as BigDataPoolsListResponse; } catch (error) { span.setStatus({ @@ -69,10 +64,7 @@ export class BigDataPools { options: updatedOptions }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as BigDataPoolsGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index bb4f356ac3fc..ba179c0fb658 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ 
b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -59,10 +59,7 @@ export class DataFlow { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDataFlowsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDataFlowsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -362,11 +359,7 @@ const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.dataFlow, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.dataFlowName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index 9cb3b7b4040d..7baff96e4392 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -61,10 +61,7 @@ export class DataFlowDebugSession { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._queryDataFlowDebugSessionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._queryDataFlowDebugSessionsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -73,9 +70,7 @@ export class DataFlowDebugSession { private async *queryDataFlowDebugSessionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage( - options - )) { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage(options)) { 
yield* page; } } @@ -88,9 +83,7 @@ export class DataFlowDebugSession { async createDataFlowDebugSession( request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions - ): Promise< - LROPoller - > { + ): Promise> { const { span, updatedOptions } = createSpan( "ArtifactsClient-createDataFlowDebugSession", this.getOperationOptions(options, "undefined") @@ -135,9 +128,7 @@ export class DataFlowDebugSession { */ private async _queryDataFlowDebugSessionsByWorkspace( options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse - > { + ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspace", coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -286,9 +277,7 @@ export class DataFlowDebugSession { private async _queryDataFlowDebugSessionsByWorkspaceNext( nextLink: string, options?: coreHttp.OperationOptions - ): Promise< - DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse - > { + ): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspaceNext", coreHttp.operationOptionsToRequestOptionsBase(options || {}) diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 498a8bf4700b..0f7ffebdb852 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -59,10 +59,7 @@ export class Dataset { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getDatasetsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getDatasetsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -377,11 +374,7 @@ const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { requestBody: 
Parameters.dataset, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.datasetName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index 37b3ca7bc218..eb317043cae3 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -4,10 +4,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { - IntegrationRuntimesListResponse, - IntegrationRuntimesGetResponse -} from "../models"; +import { IntegrationRuntimesListResponse, IntegrationRuntimesGetResponse } from "../models"; /** * Class representing a IntegrationRuntimes. @@ -27,9 +24,7 @@ export class IntegrationRuntimes { * List Integration Runtimes * @param options The options parameters. 
*/ - async list( - options?: coreHttp.OperationOptions - ): Promise { + async list(options?: coreHttp.OperationOptions): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-list", coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -38,10 +33,7 @@ export class IntegrationRuntimes { options: updatedOptions }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); return result as IntegrationRuntimesListResponse; } catch (error) { span.setStatus({ @@ -72,10 +64,7 @@ export class IntegrationRuntimes { options: updatedOptions }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as IntegrationRuntimesGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index a95a73f8f4c2..ceda490199f1 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -59,10 +59,7 @@ export class LinkedService { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getLinkedServicesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getLinkedServicesByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -71,9 +68,7 @@ export class LinkedService { private async *getLinkedServicesByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getLinkedServicesByWorkspacePagingPage( - options - )) { + for await (const page of 
this.getLinkedServicesByWorkspacePagingPage(options)) { yield* page; } } @@ -380,11 +375,7 @@ const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.linkedService, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index e13a44e8ec7c..5fc6f3e34f0e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -61,10 +61,7 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebooksByWorkspaceNext( - continuationToken, - options - ); + result = await this._getNotebooksByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -106,10 +103,7 @@ export class Notebook { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getNotebookSummaryByWorkSpaceNext( - continuationToken, - options - ); + result = await this._getNotebookSummaryByWorkSpaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -118,9 +112,7 @@ export class Notebook { private async *getNotebookSummaryByWorkSpacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getNotebookSummaryByWorkSpacePagingPage( - options - )) { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage(options)) { yield* page; } } @@ -509,11 +501,7 @@ const createOrUpdateNotebookOperationSpec: 
coreHttp.OperationSpec = { requestBody: Parameters.notebook, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.notebookName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index 2ecf0f6b7150..e1b104d1b80e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -61,10 +61,7 @@ export class Pipeline { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getPipelinesByWorkspaceNext( - continuationToken, - options - ); + result = await this._getPipelinesByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -414,11 +411,7 @@ const createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.pipeline, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.pipelineName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index b93781d2b254..19b249f180de 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -219,11 +219,7 @@ const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.filterParameters, queryParameters: [Parameters.apiVersion], - urlParameters: [ - 
Parameters.endpoint, - Parameters.pipelineName, - Parameters.runId - ], + urlParameters: [Parameters.endpoint, Parameters.pipelineName, Parameters.runId], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index 88a8c6ebde47..6ed3fffd75c4 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -61,10 +61,7 @@ export class SparkJobDefinition { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSparkJobDefinitionsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -73,9 +70,7 @@ export class SparkJobDefinition { private async *getSparkJobDefinitionsByWorkspacePagingAll( options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage( - options - )) { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage(options)) { yield* page; } } @@ -443,11 +438,7 @@ const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sparkJobDefinition, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index 45d709eebc42..5096b9fc5557 100644 
--- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -24,9 +24,7 @@ export class SqlPools { * List Sql Pools * @param options The options parameters. */ - async list( - options?: coreHttp.OperationOptions - ): Promise { + async list(options?: coreHttp.OperationOptions): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-list", coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -35,10 +33,7 @@ export class SqlPools { options: updatedOptions }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); return result as SqlPoolsListResponse; } catch (error) { span.setStatus({ @@ -69,10 +64,7 @@ export class SqlPools { options: updatedOptions }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as SqlPoolsGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index 94bba0c3edbf..6de62b75e088 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -59,10 +59,7 @@ export class SqlScript { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getSqlScriptsByWorkspaceNext( - continuationToken, - options - ); + result = await this._getSqlScriptsByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -343,11 +340,7 @@ const createOrUpdateSqlScriptOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.sqlScript, 
queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index bcc0173f84a1..cd37c09ae3f0 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -61,10 +61,7 @@ export class Trigger { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._getTriggersByWorkspaceNext( - continuationToken, - options - ); + result = await this._getTriggersByWorkspaceNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -551,11 +548,7 @@ const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.trigger, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint, Parameters.triggerName], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.ifMatch - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index 739820f2f954..26c4c7e40f2a 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -4,10 +4,7 @@ import * as coreHttp from "@azure/core-http"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { ArtifactsClient } from "../artifactsClient"; -import { - RunFilterParameters, - 
TriggerRunQueryTriggerRunsByWorkspaceResponse -} from "../models"; +import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from "../models"; /** * Class representing a TriggerRun. @@ -145,11 +142,7 @@ const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; @@ -163,11 +156,7 @@ const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.runId, - Parameters.triggerName - ], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index 43c4f1d36a87..3577e40b04e1 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -24,9 +24,7 @@ export class Workspace { * Get Workspace * @param options The options parameters. 
*/ - async get( - options?: coreHttp.OperationOptions - ): Promise { + async get(options?: coreHttp.OperationOptions): Promise { const { span, updatedOptions } = createSpan( "ArtifactsClient-get", coreHttp.operationOptionsToRequestOptionsBase(options || {}) @@ -35,10 +33,7 @@ export class Workspace { options: updatedOptions }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as WorkspaceGetResponse; } catch (error) { span.setStatus({ diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 5564aca2fdd1..682d9bc7277e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -73,11 +73,7 @@ const getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { requestBody: Parameters.gitHubAccessTokenRequest, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.endpoint], - headerParameters: [ - Parameters.accept, - Parameters.contentType, - Parameters.clientRequestId - ], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.clientRequestId], mediaType: "json", serializer }; diff --git a/sdk/synapse/synapse-managed-private-endpoints/README.md b/sdk/synapse/synapse-managed-private-endpoints/README.md index eee1a7db719b..a20661b376ab 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/README.md +++ b/sdk/synapse/synapse-managed-private-endpoints/README.md @@ -26,13 +26,17 @@ import { DefaultAzureCredential } from "@azure/identity"; export async function main(): Promise { const credential = new DefaultAzureCredential(); - let client = new ManagedPrivateEndpointsClient(credential, "https://mysynapse.dev.azuresynapse.net"); + let client = 
new ManagedPrivateEndpointsClient( + credential, + "https://mysynapse.dev.azuresynapse.net" + ); let list = await client.managedPrivateEndpoints.list("myvnet"); for await (let item of list) { console.log("item:", item); - } + } } ``` + ## Related projects - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) diff --git a/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js index d7c4da5c6839..3c741e71d5d6 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js +++ b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js @@ -26,7 +26,9 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps(), + plugins: [ + nodeResolve({ module: true }), + sourcemaps(), cjs({ namedExports: { assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts index ee17cbb59aea..34b1c7baf6c3 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts @@ -58,11 +58,7 @@ export class ManagedPrivateEndpoints { yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { - result = await this._listNext( - managedVirtualNetworkName, - continuationToken, - options - ); + result = await this._listNext(managedVirtualNetworkName, continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } @@ -72,10 +68,7 @@ export class ManagedPrivateEndpoints { managedVirtualNetworkName: string, options?: coreHttp.OperationOptions ): AsyncIterableIterator { - for 
await (const page of this.listPagingPage( - managedVirtualNetworkName, - options - )) { + for await (const page of this.listPagingPage(managedVirtualNetworkName, options)) { yield* page; } } @@ -101,10 +94,7 @@ export class ManagedPrivateEndpoints { options: updatedOptions }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - getOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); return result as ManagedPrivateEndpointsGetResponse; } catch (error) { span.setStatus({ @@ -212,10 +202,7 @@ export class ManagedPrivateEndpoints { options: updatedOptions }; try { - const result = await this.client.sendOperationRequest( - operationArguments, - listOperationSpec - ); + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); return result as ManagedPrivateEndpointsListResponse; } catch (error) { span.setStatus({ @@ -321,8 +308,7 @@ const deleteOperationSpec: coreHttp.OperationSpec = { serializer }; const listOperationSpec: coreHttp.OperationSpec = { - path: - "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", + path: "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", httpMethod: "GET", responses: { 200: { @@ -343,11 +329,7 @@ const listNextOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.apiVersion], - urlParameters: [ - Parameters.endpoint, - Parameters.managedVirtualNetworkName, - Parameters.nextLink - ], + urlParameters: [Parameters.endpoint, Parameters.managedVirtualNetworkName, Parameters.nextLink], headerParameters: [Parameters.accept], serializer }; diff --git a/sdk/synapse/synapse-monitoring/README.md b/sdk/synapse/synapse-monitoring/README.md index e24af2b09d58..6a3b8cc78e54 100644 --- a/sdk/synapse/synapse-monitoring/README.md +++ b/sdk/synapse/synapse-monitoring/README.md @@ -31,6 +31,7 @@ export async function main(): Promise { 
console.log("output:", output); } ``` + ## Related projects - [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) diff --git a/sdk/synapse/synapse-monitoring/rollup.config.js b/sdk/synapse/synapse-monitoring/rollup.config.js index c39e8ad35e9a..2b76f5847500 100644 --- a/sdk/synapse/synapse-monitoring/rollup.config.js +++ b/sdk/synapse/synapse-monitoring/rollup.config.js @@ -26,7 +26,9 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps(), + plugins: [ + nodeResolve({ module: true }), + sourcemaps(), cjs({ namedExports: { assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], diff --git a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts index 16641c10119a..7dee9d6b5045 100644 --- a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts +++ b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts @@ -112,12 +112,7 @@ const getSqlJobQueryStringOperationSpec: coreHttp.OperationSpec = { bodyMapper: Mappers.SqlQueryStringDataModel } }, - queryParameters: [ - Parameters.apiVersion, - Parameters.filter, - Parameters.orderby, - Parameters.skip - ], + queryParameters: [Parameters.apiVersion, Parameters.filter, Parameters.orderby, Parameters.skip], urlParameters: [Parameters.endpoint], headerParameters: [Parameters.accept, Parameters.xMsClientRequestId], serializer diff --git a/sdk/synapse/synapse-spark/rollup.config.js b/sdk/synapse/synapse-spark/rollup.config.js index c497548aa119..b2b31142e226 100644 --- a/sdk/synapse/synapse-spark/rollup.config.js +++ b/sdk/synapse/synapse-spark/rollup.config.js @@ -26,7 +26,9 @@ const config = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ ` }, - plugins: [nodeResolve({ module: true }), sourcemaps(), + plugins: [ + nodeResolve({ module: true }), + sourcemaps(), cjs({ namedExports: { assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts index 15d7daba57b2..507565abbb04 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts @@ -174,11 +174,7 @@ const getSparkBatchJobsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept], serializer }; @@ -192,11 +188,7 @@ const createSparkBatchJobOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.sparkBatchJobOptions, queryParameters: [Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer diff --git a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts index f475d080c114..2c0d38793843 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts @@ -358,11 +358,7 @@ const getSparkSessionsOperationSpec: coreHttp.OperationSpec = { } }, queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: 
[Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept], serializer }; @@ -376,11 +372,7 @@ const createSparkSessionOperationSpec: coreHttp.OperationSpec = { }, requestBody: Parameters.sparkSessionOptions, queryParameters: [Parameters.detailed], - urlParameters: [ - Parameters.endpoint, - Parameters.livyApiVersion, - Parameters.sparkPoolName - ], + urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName], headerParameters: [Parameters.accept, Parameters.contentType], mediaType: "json", serializer From 13863cdc4cdfef065521bbbe32dba1033eb3a7b7 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 9 Dec 2020 11:27:27 +1300 Subject: [PATCH 19/28] Remove prepack --- sdk/synapse/synapse-access-control/package.json | 1 - sdk/synapse/synapse-artifacts/package.json | 1 - sdk/synapse/synapse-managed-private-endpoints/package.json | 1 - sdk/synapse/synapse-monitoring/package.json | 1 - sdk/synapse/synapse-spark/package.json | 1 - 5 files changed, 5 deletions(-) diff --git a/sdk/synapse/synapse-access-control/package.json b/sdk/synapse/synapse-access-control/package.json index ea8e632a5442..9c55c90f1780 100644 --- a/sdk/synapse/synapse-access-control/package.json +++ b/sdk/synapse/synapse-access-control/package.json @@ -55,7 +55,6 @@ "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-access-control.js.map'\" -o ./dist/synapse-access-control.min.js ./dist/synapse-access-control.js", - "prepack": "npm install && npm run build", "pack": "npm pack 2>&1", "build:test": "echo skip", "unit-test:browser": "echo skipped", diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index d367ec99840b..03da2073f7df 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -56,7 
+56,6 @@ "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-artifacts.js.map'\" -o ./dist/synapse-artifacts.min.js ./dist/synapse-artifacts.js", - "prepack": "npm install && npm run build", "pack": "npm pack 2>&1", "build:test": "echo skip", "unit-test:browser": "echo skipped", diff --git a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json index 7908df34825a..a156203d280b 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -55,7 +55,6 @@ "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-private-endpoints.js.map'\" -o ./dist/synapse-managed-private-endpoints.min.js ./dist/synapse-managed-private-endpoints.js", - "prepack": "npm install && npm run build", "build:test": "echo skip", "pack": "npm pack 2>&1", "unit-test:browser": "echo skipped", diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index 4d87bda95ce9..d29cbc31d3c8 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -54,7 +54,6 @@ "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-monitoring.js.map'\" -o ./dist/synapse-monitoring.min.js ./dist/synapse-monitoring.js", - "prepack": "npm install && npm run build", "pack": "npm pack 2>&1", "build:test": "echo skip", "unit-test:browser": "echo skipped", diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index 1177b257aa0f..fb54545bcc75 100644 --- 
a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -54,7 +54,6 @@ "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-spark.js.map'\" -o ./dist/synapse-spark.min.js ./dist/synapse-spark.js", - "prepack": "npm install && npm run build", "pack": "npm pack 2>&1", "build:test": "echo skip", "unit-test:browser": "echo skipped", From 3b5e6c6f6de0c03a7c3371140e1c9174adccc3a0 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 9 Dec 2020 12:09:04 +1300 Subject: [PATCH 20/28] Add sdk-type --- sdk/synapse/synapse-access-control/package.json | 1 + sdk/synapse/synapse-artifacts/package.json | 1 + sdk/synapse/synapse-managed-private-endpoints/package.json | 1 + sdk/synapse/synapse-monitoring/package.json | 1 + sdk/synapse/synapse-spark/package.json | 1 + 5 files changed, 5 insertions(+) diff --git a/sdk/synapse/synapse-access-control/package.json b/sdk/synapse/synapse-access-control/package.json index 9c55c90f1780..a1669a3422e8 100644 --- a/sdk/synapse/synapse-access-control/package.json +++ b/sdk/synapse/synapse-access-control/package.json @@ -2,6 +2,7 @@ "name": "@azure/synapse-access-control", "author": "Microsoft Corporation", "description": "A generated SDK for AccessControlClient.", + "sdk-type": "client", "version": "1.0.0-beta.1", "dependencies": { "@azure/core-paging": "^1.1.1", diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 03da2073f7df..45d3025d12c3 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -2,6 +2,7 @@ "name": "@azure/synapse-artifacts", "author": "Microsoft Corporation", "description": "A generated SDK for ArtifactsClient.", + "sdk-type": "client", "version": "1.0.0-beta.1", "dependencies": { "@azure/core-lro": "^1.0.2", diff --git 
a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json index a156203d280b..70aee874b24d 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -2,6 +2,7 @@ "name": "@azure/synapse-managed-private-endpoints", "author": "Microsoft Corporation", "description": "A generated SDK for ManagedPrivateEndpointsClient.", + "sdk-type": "client", "version": "1.0.0-beta.1", "dependencies": { "@azure/core-paging": "^1.1.1", diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index d29cbc31d3c8..19aed5509845 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -2,6 +2,7 @@ "name": "@azure/synapse-monitoring", "author": "Microsoft Corporation", "description": "A generated SDK for MonitoringClient.", + "sdk-type": "client", "version": "1.0.0-beta.1", "dependencies": { "@azure/core-http": "^1.2.0", diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index fb54545bcc75..3086bc1d49bf 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -2,6 +2,7 @@ "name": "@azure/synapse-spark", "author": "Microsoft Corporation", "description": "A generated SDK for SparkClient.", + "sdk-type": "client", "version": "1.0.0-beta.1", "dependencies": { "@azure/core-http": "^1.2.0", From 539c12699a7da70cad3f7376e0221d2bc1b91734 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 9 Dec 2020 13:19:54 +1300 Subject: [PATCH 21/28] Add lint --- sdk/synapse/synapse-access-control/package.json | 1 + sdk/synapse/synapse-artifacts/package.json | 1 + sdk/synapse/synapse-managed-private-endpoints/package.json | 1 + sdk/synapse/synapse-monitoring/package.json | 1 + sdk/synapse/synapse-spark/package.json | 1 + 5 files changed, 5 insertions(+) diff --git 
a/sdk/synapse/synapse-access-control/package.json b/sdk/synapse/synapse-access-control/package.json index a1669a3422e8..a53773b3cf57 100644 --- a/sdk/synapse/synapse-access-control/package.json +++ b/sdk/synapse/synapse-access-control/package.json @@ -58,6 +58,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-access-control.js.map'\" -o ./dist/synapse-access-control.min.js ./dist/synapse-access-control.js", "pack": "npm pack 2>&1", "build:test": "echo skip", + "lint": "eslint package.json api-extractor.json src --ext .ts", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 45d3025d12c3..fb911a0be05a 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -59,6 +59,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-artifacts.js.map'\" -o ./dist/synapse-artifacts.min.js ./dist/synapse-artifacts.js", "pack": "npm pack 2>&1", "build:test": "echo skip", + "lint": "eslint package.json api-extractor.json src --ext .ts", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json index 70aee874b24d..874bf14730b0 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -58,6 +58,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-private-endpoints.js.map'\" -o ./dist/synapse-managed-private-endpoints.min.js ./dist/synapse-managed-private-endpoints.js", "build:test": "echo skip", "pack": "npm pack 2>&1", + "lint": "eslint package.json 
api-extractor.json src --ext .ts", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index 19aed5509845..93ef146770da 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -57,6 +57,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-monitoring.js.map'\" -o ./dist/synapse-monitoring.min.js ./dist/synapse-monitoring.js", "pack": "npm pack 2>&1", "build:test": "echo skip", + "lint": "eslint package.json api-extractor.json src --ext .ts", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index 3086bc1d49bf..0b86e14eb93e 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -57,6 +57,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-spark.js.map'\" -o ./dist/synapse-spark.min.js ./dist/synapse-spark.js", "pack": "npm pack 2>&1", "build:test": "echo skip", + "lint": "eslint package.json api-extractor.json src --ext .ts", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", From 000d04fce76e823a7bcb8a1491e95492cdd845f2 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Wed, 9 Dec 2020 13:53:33 +1300 Subject: [PATCH 22/28] Skip lint for now --- sdk/synapse/synapse-access-control/package.json | 4 +++- sdk/synapse/synapse-artifacts/package.json | 4 +++- sdk/synapse/synapse-managed-private-endpoints/package.json | 4 +++- sdk/synapse/synapse-monitoring/package.json | 4 +++- sdk/synapse/synapse-spark/package.json | 4 +++- 5 files changed, 15 insertions(+), 5 
deletions(-) diff --git a/sdk/synapse/synapse-access-control/package.json b/sdk/synapse/synapse-access-control/package.json index a53773b3cf57..20cc8b57a148 100644 --- a/sdk/synapse/synapse-access-control/package.json +++ b/sdk/synapse/synapse-access-control/package.json @@ -22,6 +22,8 @@ "types": "./types/synapse-access-control.d.ts", "devDependencies": { "typescript": "~3.9.3", + "eslint": "^6.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", @@ -58,7 +60,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-access-control.js.map'\" -o ./dist/synapse-access-control.min.js ./dist/synapse-access-control.js", "pack": "npm pack 2>&1", "build:test": "echo skip", - "lint": "eslint package.json api-extractor.json src --ext .ts", + "lint": "echo skipped", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index fb911a0be05a..7bf800095f16 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -24,6 +24,8 @@ "types": "./types/synapse-artifacts.d.ts", "devDependencies": { "typescript": "~3.9.3", + "eslint": "^6.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", @@ -59,7 +61,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-artifacts.js.map'\" -o ./dist/synapse-artifacts.min.js ./dist/synapse-artifacts.js", "pack": "npm pack 2>&1", "build:test": "echo skip", - "lint": "eslint package.json api-extractor.json src --ext .ts", + "lint": "echo skipped", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run 
unit-test:browser", diff --git a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json index 874bf14730b0..48301eab6079 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -23,6 +23,8 @@ "types": "./types/synapse-managed-private-endpoints.d.ts", "devDependencies": { "typescript": "~3.9.3", + "eslint": "^6.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", @@ -58,7 +60,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-private-endpoints.js.map'\" -o ./dist/synapse-managed-private-endpoints.min.js ./dist/synapse-managed-private-endpoints.js", "build:test": "echo skip", "pack": "npm pack 2>&1", - "lint": "eslint package.json api-extractor.json src --ext .ts", + "lint": "echo skipped", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index 93ef146770da..4f8674337f9c 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -22,6 +22,8 @@ "types": "./types/synapse-monitoring.d.ts", "devDependencies": { "typescript": "~3.9.3", + "eslint": "^6.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", @@ -57,7 +59,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-monitoring.js.map'\" -o ./dist/synapse-monitoring.min.js ./dist/synapse-monitoring.js", "pack": "npm pack 2>&1", "build:test": "echo skip", - "lint": "eslint package.json api-extractor.json src --ext .ts", + "lint": "echo skipped", "unit-test:browser": "echo 
skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index 0b86e14eb93e..a32a35df5c8e 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -22,6 +22,8 @@ "types": "./types/synapse-spark.d.ts", "devDependencies": { "typescript": "~3.9.3", + "eslint": "^6.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", "rollup": "^1.16.3", "rollup-plugin-node-resolve": "^3.4.0", "rollup-plugin-sourcemaps": "^0.4.2", @@ -57,7 +59,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-spark.js.map'\" -o ./dist/synapse-spark.min.js ./dist/synapse-spark.js", "pack": "npm pack 2>&1", "build:test": "echo skip", - "lint": "eslint package.json api-extractor.json src --ext .ts", + "lint": "echo skipped", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", From d058a8ff62a9b42e6f8efefc4fe7d109236c0cdb Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Thu, 10 Dec 2020 07:45:22 +1300 Subject: [PATCH 23/28] Add copyright and improve generation --- sdk/keyvault/keyvault-secrets/package.json | 1 - .../synapse-access-control/CHANGELOG.md | 5 + .../synapse-access-control/api-extractor.json | 2 +- .../synapse-access-control/package.json | 32 +- .../review/synapse-access-control.api.md | 15 +- .../synapse-access-control/rollup.config.js | 6 +- .../src/accessControlClient.ts | 309 +++++++++--------- .../src/accessControlClientContext.ts | 2 + .../synapse-access-control/src/index.ts | 2 + .../src/models/index.ts | 4 +- .../src/models/mappers.ts | 2 + .../src/models/parameters.ts | 2 + .../synapse-access-control/src/tracing.ts | 2 + .../synapse-access-control/tsconfig.json | 2 +- sdk/synapse/synapse-artifacts/CHANGELOG.md | 5 + .../synapse-artifacts/api-extractor.json | 2 +- 
sdk/synapse/synapse-artifacts/package.json | 24 +- .../review/synapse-artifacts.api.md | 2 +- .../synapse-artifacts/rollup.config.js | 6 +- .../synapse-artifacts/src/artifactsClient.ts | 2 + .../src/artifactsClientContext.ts | 2 + sdk/synapse/synapse-artifacts/src/index.ts | 2 + .../src/lro/azureAsyncOperationStrategy.ts | 2 + .../src/lro/bodyPollingStrategy.ts | 2 + .../synapse-artifacts/src/lro/constants.ts | 2 + .../synapse-artifacts/src/lro/index.ts | 2 + .../src/lro/locationStrategy.ts | 2 + .../synapse-artifacts/src/lro/lroPolicy.ts | 2 + .../synapse-artifacts/src/lro/lroPoller.ts | 2 + .../synapse-artifacts/src/lro/models.ts | 2 + .../synapse-artifacts/src/lro/operation.ts | 2 + .../src/lro/passthroughStrategy.ts | 2 + .../synapse-artifacts/src/lro/requestUtils.ts | 2 + .../synapse-artifacts/src/models/index.ts | 2 + .../synapse-artifacts/src/models/mappers.ts | 2 + .../src/models/parameters.ts | 2 + .../src/operations/bigDataPools.ts | 2 + .../src/operations/dataFlow.ts | 2 + .../src/operations/dataFlowDebugSession.ts | 2 + .../src/operations/dataset.ts | 2 + .../synapse-artifacts/src/operations/index.ts | 2 + .../src/operations/integrationRuntimes.ts | 2 + .../src/operations/linkedService.ts | 2 + .../src/operations/notebook.ts | 2 + .../src/operations/pipeline.ts | 2 + .../src/operations/pipelineRun.ts | 2 + .../src/operations/sparkJobDefinition.ts | 2 + .../src/operations/sqlPools.ts | 2 + .../src/operations/sqlScript.ts | 2 + .../src/operations/trigger.ts | 2 + .../src/operations/triggerRun.ts | 2 + .../src/operations/workspace.ts | 2 + .../operations/workspaceGitRepoManagement.ts | 2 + sdk/synapse/synapse-artifacts/src/tracing.ts | 2 + sdk/synapse/synapse-artifacts/tsconfig.json | 2 +- .../CHANGELOG.md | 5 + .../api-extractor.json | 2 +- .../package.json | 24 +- .../rollup.config.js | 6 +- .../src/index.ts | 2 + .../src/managedPrivateEndpointsClient.ts | 2 + .../managedPrivateEndpointsClientContext.ts | 2 + .../src/models/index.ts | 2 + 
.../src/models/mappers.ts | 2 + .../src/models/parameters.ts | 2 + .../src/operations/index.ts | 2 + .../src/operations/managedPrivateEndpoints.ts | 2 + .../src/tracing.ts | 2 + .../tsconfig.json | 2 +- sdk/synapse/synapse-monitoring/CHANGELOG.md | 5 + .../synapse-monitoring/api-extractor.json | 2 +- sdk/synapse/synapse-monitoring/package.json | 24 +- .../synapse-monitoring/rollup.config.js | 6 +- sdk/synapse/synapse-monitoring/src/index.ts | 2 + .../synapse-monitoring/src/models/index.ts | 2 + .../synapse-monitoring/src/models/mappers.ts | 2 + .../src/models/parameters.ts | 2 + .../src/monitoringClient.ts | 2 + .../src/monitoringClientContext.ts | 2 + .../src/operations/index.ts | 2 + .../src/operations/monitoring.ts | 2 + sdk/synapse/synapse-monitoring/src/tracing.ts | 2 + sdk/synapse/synapse-monitoring/tsconfig.json | 2 +- sdk/synapse/synapse-spark/CHANGELOG.md | 5 + sdk/synapse/synapse-spark/api-extractor.json | 2 +- sdk/synapse/synapse-spark/package.json | 24 +- sdk/synapse/synapse-spark/rollup.config.js | 6 +- sdk/synapse/synapse-spark/src/index.ts | 2 + sdk/synapse/synapse-spark/src/models/index.ts | 2 + .../synapse-spark/src/models/mappers.ts | 2 + .../synapse-spark/src/models/parameters.ts | 2 + .../synapse-spark/src/operations/index.ts | 2 + .../src/operations/sparkBatch.ts | 2 + .../src/operations/sparkSession.ts | 2 + sdk/synapse/synapse-spark/src/sparkClient.ts | 2 + .../synapse-spark/src/sparkClientContext.ts | 2 + sdk/synapse/synapse-spark/src/tracing.ts | 2 + sdk/synapse/synapse-spark/tsconfig.json | 2 +- 98 files changed, 419 insertions(+), 251 deletions(-) create mode 100644 sdk/synapse/synapse-access-control/CHANGELOG.md create mode 100644 sdk/synapse/synapse-artifacts/CHANGELOG.md create mode 100644 sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md create mode 100644 sdk/synapse/synapse-monitoring/CHANGELOG.md create mode 100644 sdk/synapse/synapse-spark/CHANGELOG.md diff --git a/sdk/keyvault/keyvault-secrets/package.json 
b/sdk/keyvault/keyvault-secrets/package.json index 5521947372bb..928709dbaf73 100644 --- a/sdk/keyvault/keyvault-secrets/package.json +++ b/sdk/keyvault/keyvault-secrets/package.json @@ -10,7 +10,6 @@ "keywords": [ "node", "azure", - "cloud", "typescript", "browser", "isomorphic", diff --git a/sdk/synapse/synapse-access-control/CHANGELOG.md b/sdk/synapse/synapse-access-control/CHANGELOG.md new file mode 100644 index 000000000000..f3a1bab8f0ee --- /dev/null +++ b/sdk/synapse/synapse-access-control/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0-beta.1 + +- Initial release diff --git a/sdk/synapse/synapse-access-control/api-extractor.json b/sdk/synapse/synapse-access-control/api-extractor.json index 1adb6d7782e8..3bc7a3670741 100644 --- a/sdk/synapse/synapse-access-control/api-extractor.json +++ b/sdk/synapse/synapse-access-control/api-extractor.json @@ -1,6 +1,6 @@ { "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", - "mainEntryPointFilePath": "./esm/index.d.ts", + "mainEntryPointFilePath": "./dist-esm/index.d.ts", "docModel": { "enabled": true }, "apiReport": { "enabled": true, "reportFolder": "./review" }, "dtsRollup": { diff --git a/sdk/synapse/synapse-access-control/package.json b/sdk/synapse/synapse-access-control/package.json index 20cc8b57a148..9c3631e7d089 100644 --- a/sdk/synapse/synapse-access-control/package.json +++ b/sdk/synapse/synapse-access-control/package.json @@ -2,6 +2,8 @@ "name": "@azure/synapse-access-control", "author": "Microsoft Corporation", "description": "A generated SDK for AccessControlClient.", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-access-control/README.md", + "repository": "github:Azure/azure-sdk-for-js", "sdk-type": "client", "version": "1.0.0-beta.1", "dependencies": { @@ -14,11 +16,18 @@ "azure", "typescript", "browser", + "cloud", "isomorphic" ], "license": "MIT", - "main": "./dist/synapse-access-control.js", 
- "module": "./esm/index.js", + "engine": { + "node": ">=8.0.0" + }, + "engines": { + "node": ">=8.0.0" + }, + "main": "./dist/index.js", + "module": "./dist-esm/src/index.js", "types": "./types/synapse-access-control.d.ts", "devDependencies": { "typescript": "~3.9.3", @@ -32,11 +41,6 @@ "@opentelemetry/api": "^0.10.2", "@microsoft/api-extractor": "7.7.11" }, - "homepage": "https://github.com/Azure/azure-sdk-for-js", - "repository": { - "type": "git", - "url": "https://github.com/Azure/azure-sdk-for-js.git" - }, "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, @@ -45,11 +49,10 @@ "dist/**/*.js.map", "dist/**/*.d.ts", "dist/**/*.d.ts.map", - "esm/**/*.js", - "esm/**/*.js.map", - "esm/**/*.d.ts", - "esm/**/*.d.ts.map", - "src/**/*.ts", + "dist-esm/**/*.js", + "dist-esm/**/*.js.map", + "dist-esm/**/*.d.ts", + "dist-esm/**/*.d.ts.map", "types/synapse-access-control.d.ts", "README.md", "rollup.config.js", @@ -57,10 +60,11 @@ ], "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", - "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-access-control.js.map'\" -o ./dist/synapse-access-control.min.js ./dist/synapse-access-control.js", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", "pack": "npm pack 2>&1", + "test": "echo skip", "build:test": "echo skip", - "lint": "echo skipped", + "lint": "echo skip", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-access-control/review/synapse-access-control.api.md b/sdk/synapse/synapse-access-control/review/synapse-access-control.api.md index c998154e4a9e..70d8b36d1919 100644 --- a/sdk/synapse/synapse-access-control/review/synapse-access-control.api.md +++ 
b/sdk/synapse/synapse-access-control/review/synapse-access-control.api.md @@ -9,8 +9,7 @@ import { PagedAsyncIterableIterator } from '@azure/core-paging'; // @public (undocumented) export class AccessControlClient extends AccessControlClientContext { - constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: AccessControlClientOptionalParams); - createRoleAssignment(createRoleAssignmentOptions: RoleAssignmentOptions, options?: coreHttp.OperationOptions): Promise; + createRoleAssignment(createRoleAssignmentOptions: CreateRoleAssignmentOptions, options?: coreHttp.OperationOptions): Promise; deleteRoleAssignmentById(roleAssignmentId: string, options?: coreHttp.OperationOptions): Promise; getCallerRoleAssignments(options?: coreHttp.OperationOptions): Promise; getRoleAssignmentById(roleAssignmentId: string, options?: coreHttp.OperationOptions): Promise; @@ -104,6 +103,12 @@ export interface AccessControlClientOptionalParams extends coreHttp.ServiceClien endpoint?: string; } +// @public +export interface CreateRoleAssignmentOptions { + principalId: string; + roleId: string; +} + // @public export interface ErrorContract { error?: ErrorResponse; @@ -138,12 +143,6 @@ export interface RoleAssignmentDetails { roleId?: string; } -// @public -export interface RoleAssignmentOptions { - principalId: string; - roleId: string; -} - // @public export interface RolesListResponse { nextLink?: string; diff --git a/sdk/synapse/synapse-access-control/rollup.config.js b/sdk/synapse/synapse-access-control/rollup.config.js index a5d620e63cc1..d5699103e5d1 100644 --- a/sdk/synapse/synapse-access-control/rollup.config.js +++ b/sdk/synapse/synapse-access-control/rollup.config.js @@ -7,11 +7,11 @@ import cjs from "@rollup/plugin-commonjs"; * @type {rollup.RollupFileOptions} */ const config = { - input: "./esm/accessControlClient.js", + input: "./dist-esm/accessControlClient.js", external: ["@azure/core-http", "@azure/core-arm"], output: 
{ - file: "./dist/synapse-access-control.js", - format: "umd", + file: "./dist/index.js", + format: "cjs", name: "Azure.SynapseAccessControl", sourcemap: true, globals: { diff --git a/sdk/synapse/synapse-access-control/src/accessControlClient.ts b/sdk/synapse/synapse-access-control/src/accessControlClient.ts index fafbf353df58..9ce41622d5e6 100644 --- a/sdk/synapse/synapse-access-control/src/accessControlClient.ts +++ b/sdk/synapse/synapse-access-control/src/accessControlClient.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { CanonicalCode } from "@opentelemetry/api"; @@ -6,11 +8,10 @@ import * as Parameters from "./models/parameters"; import * as Mappers from "./models/mappers"; import { AccessControlClientContext } from "./accessControlClientContext"; import { - AccessControlClientOptionalParams, SynapseRole, AccessControlClientGetRoleDefinitionsResponse, AccessControlClientGetRoleDefinitionByIdResponse, - RoleAssignmentOptions, + CreateRoleAssignmentOptions, AccessControlClientCreateRoleAssignmentResponse, AccessControlClientGetRoleAssignmentsOptionalParams, AccessControlClientGetRoleAssignmentsResponse, @@ -19,22 +20,151 @@ import { AccessControlClientGetRoleDefinitionsNextResponse } from "./models"; -export class AccessControlClient extends AccessControlClientContext { - /** - * Initializes a new instance of the AccessControlClient class. - * @param credentials Subscription credentials which uniquely identify client subscription. - * @param endpoint The workspace development endpoint, for example - * https://myworkspace.dev.azuresynapse.net. 
- * @param options The parameter options - */ - constructor( - credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, - endpoint: string, - options?: AccessControlClientOptionalParams - ) { - super(credentials, endpoint, options); - } +// Operation Specifications +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getRoleDefinitionsOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roles", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.RolesListResponse + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getRoleDefinitionByIdOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roles/{roleId}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SynapseRole + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.roleId], + headerParameters: [Parameters.accept], + serializer +}; +const createRoleAssignmentOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roleAssignments", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.RoleAssignmentDetails + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + requestBody: Parameters.createRoleAssignmentOptions, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roleAssignments", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "RoleAssignmentDetails" } + } + } + }, + headersMapper: Mappers.AccessControlClientGetRoleAssignmentsHeaders + }, 
+ default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion, Parameters.roleId1, Parameters.principalId], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.continuationToken], + serializer +}; +const getRoleAssignmentByIdOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roleAssignments/{roleAssignmentId}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.RoleAssignmentDetails + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.roleAssignmentId], + headerParameters: [Parameters.accept], + serializer +}; +const deleteRoleAssignmentByIdOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/roleAssignments/{roleAssignmentId}", + httpMethod: "DELETE", + responses: { + 200: {}, + 204: {}, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.roleAssignmentId], + headerParameters: [Parameters.accept], + serializer +}; +const getCallerRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { + path: "/rbac/getMyAssignedRoles", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Sequence", element: { type: { name: "String" } } } + } + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getRoleDefinitionsNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.RolesListResponse + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; + +export 
class AccessControlClient extends AccessControlClientContext { /** * List roles. * @param options The options parameters. @@ -148,7 +278,7 @@ export class AccessControlClient extends AccessControlClientContext { * @param options The options parameters. */ async createRoleAssignment( - createRoleAssignmentOptions: RoleAssignmentOptions, + createRoleAssignmentOptions: CreateRoleAssignmentOptions, options?: coreHttp.OperationOptions ): Promise { const { span, updatedOptions } = createSpan( @@ -340,146 +470,3 @@ export class AccessControlClient extends AccessControlClientContext { } } } -// Operation Specifications - -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); - -const getRoleDefinitionsOperationSpec: coreHttp.OperationSpec = { - path: "/rbac/roles", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.RolesListResponse - }, - default: { - bodyMapper: Mappers.ErrorContract - } - }, - queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint], - headerParameters: [Parameters.accept], - serializer -}; -const getRoleDefinitionByIdOperationSpec: coreHttp.OperationSpec = { - path: "/rbac/roles/{roleId}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.SynapseRole - }, - default: { - bodyMapper: Mappers.ErrorContract - } - }, - queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.roleId], - headerParameters: [Parameters.accept], - serializer -}; -const createRoleAssignmentOperationSpec: coreHttp.OperationSpec = { - path: "/rbac/roleAssignments", - httpMethod: "POST", - responses: { - 200: { - bodyMapper: Mappers.RoleAssignmentDetails - }, - default: { - bodyMapper: Mappers.ErrorContract - } - }, - requestBody: Parameters.createRoleAssignmentOptions, - queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint], - headerParameters: [Parameters.accept, Parameters.contentType], - mediaType: "json", - serializer -}; -const 
getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { - path: "/rbac/roleAssignments", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: { - type: { - name: "Sequence", - element: { - type: { name: "Composite", className: "RoleAssignmentDetails" } - } - } - }, - headersMapper: Mappers.AccessControlClientGetRoleAssignmentsHeaders - }, - default: { - bodyMapper: Mappers.ErrorContract - } - }, - queryParameters: [Parameters.apiVersion, Parameters.roleId1, Parameters.principalId], - urlParameters: [Parameters.endpoint], - headerParameters: [Parameters.accept, Parameters.continuationToken], - serializer -}; -const getRoleAssignmentByIdOperationSpec: coreHttp.OperationSpec = { - path: "/rbac/roleAssignments/{roleAssignmentId}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.RoleAssignmentDetails - }, - default: { - bodyMapper: Mappers.ErrorContract - } - }, - queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.roleAssignmentId], - headerParameters: [Parameters.accept], - serializer -}; -const deleteRoleAssignmentByIdOperationSpec: coreHttp.OperationSpec = { - path: "/rbac/roleAssignments/{roleAssignmentId}", - httpMethod: "DELETE", - responses: { - 200: {}, - 204: {}, - default: { - bodyMapper: Mappers.ErrorContract - } - }, - queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.roleAssignmentId], - headerParameters: [Parameters.accept], - serializer -}; -const getCallerRoleAssignmentsOperationSpec: coreHttp.OperationSpec = { - path: "/rbac/getMyAssignedRoles", - httpMethod: "POST", - responses: { - 200: { - bodyMapper: { - type: { name: "Sequence", element: { type: { name: "String" } } } - } - }, - default: { - bodyMapper: Mappers.ErrorContract - } - }, - queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint], - headerParameters: [Parameters.accept], - serializer -}; -const getRoleDefinitionsNextOperationSpec: coreHttp.OperationSpec 
= { - path: "{nextLink}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: Mappers.RolesListResponse - }, - default: { - bodyMapper: Mappers.ErrorContract - } - }, - queryParameters: [Parameters.apiVersion], - urlParameters: [Parameters.endpoint, Parameters.nextLink], - headerParameters: [Parameters.accept], - serializer -}; diff --git a/sdk/synapse/synapse-access-control/src/accessControlClientContext.ts b/sdk/synapse/synapse-access-control/src/accessControlClientContext.ts index cd145a860b33..191b933fe6a6 100644 --- a/sdk/synapse/synapse-access-control/src/accessControlClientContext.ts +++ b/sdk/synapse/synapse-access-control/src/accessControlClientContext.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; import { AccessControlClientOptionalParams } from "./models"; diff --git a/sdk/synapse/synapse-access-control/src/index.ts b/sdk/synapse/synapse-access-control/src/index.ts index 395119853995..21d9a7056568 100644 --- a/sdk/synapse/synapse-access-control/src/index.ts +++ b/sdk/synapse/synapse-access-control/src/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /// export * from "./models"; export { AccessControlClient } from "./accessControlClient"; diff --git a/sdk/synapse/synapse-access-control/src/models/index.ts b/sdk/synapse/synapse-access-control/src/models/index.ts index e651fb487f46..2810fedee70b 100644 --- a/sdk/synapse/synapse-access-control/src/models/index.ts +++ b/sdk/synapse/synapse-access-control/src/models/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import * as coreHttp from "@azure/core-http"; /** @@ -58,7 +60,7 @@ export interface ErrorDetail { /** * Role Assignment request details */ -export interface RoleAssignmentOptions { +export interface CreateRoleAssignmentOptions { /** * Role ID of the Synapse Built-In Role */ diff --git a/sdk/synapse/synapse-access-control/src/models/mappers.ts b/sdk/synapse/synapse-access-control/src/models/mappers.ts index a7b674d7d875..18f6ab27e9c9 100644 --- a/sdk/synapse/synapse-access-control/src/models/mappers.ts +++ b/sdk/synapse/synapse-access-control/src/models/mappers.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; export const RolesListResponse: coreHttp.CompositeMapper = { diff --git a/sdk/synapse/synapse-access-control/src/models/parameters.ts b/sdk/synapse/synapse-access-control/src/models/parameters.ts index 851797e88961..9d0b4b306b5d 100644 --- a/sdk/synapse/synapse-access-control/src/models/parameters.ts +++ b/sdk/synapse/synapse-access-control/src/models/parameters.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { OperationParameter, OperationURLParameter, diff --git a/sdk/synapse/synapse-access-control/src/tracing.ts b/sdk/synapse/synapse-access-control/src/tracing.ts index 164b82721674..bd5964daaed0 100644 --- a/sdk/synapse/synapse-access-control/src/tracing.ts +++ b/sdk/synapse/synapse-access-control/src/tracing.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { createSpanFunction } from "@azure/core-http"; export const createSpan = createSpanFunction({ diff --git a/sdk/synapse/synapse-access-control/tsconfig.json b/sdk/synapse/synapse-access-control/tsconfig.json index 0290d6707a44..d43efedfc9bc 100644 --- a/sdk/synapse/synapse-access-control/tsconfig.json +++ b/sdk/synapse/synapse-access-control/tsconfig.json @@ -12,7 +12,7 @@ "preserveConstEnums": true, "lib": ["es6", "dom"], "declaration": true, - "outDir": "./esm", + "outDir": "./dist-esm", "importHelpers": true }, "include": ["./src/**/*.ts"], diff --git a/sdk/synapse/synapse-artifacts/CHANGELOG.md b/sdk/synapse/synapse-artifacts/CHANGELOG.md new file mode 100644 index 000000000000..f3a1bab8f0ee --- /dev/null +++ b/sdk/synapse/synapse-artifacts/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0-beta.1 + +- Initial release diff --git a/sdk/synapse/synapse-artifacts/api-extractor.json b/sdk/synapse/synapse-artifacts/api-extractor.json index 80612d4592f6..a5982c5913e1 100644 --- a/sdk/synapse/synapse-artifacts/api-extractor.json +++ b/sdk/synapse/synapse-artifacts/api-extractor.json @@ -1,6 +1,6 @@ { "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", - "mainEntryPointFilePath": "./esm/index.d.ts", + "mainEntryPointFilePath": "./dist-esm/index.d.ts", "docModel": { "enabled": true }, "apiReport": { "enabled": true, "reportFolder": "./review" }, "dtsRollup": { diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 7bf800095f16..8644dea6744d 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -4,6 +4,8 @@ "description": "A generated SDK for ArtifactsClient.", "sdk-type": "client", "version": "1.0.0-beta.1", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-artifacts/README.md", + "repository": "github:Azure/azure-sdk-for-js", "dependencies": { 
"@azure/core-lro": "^1.0.2", "@azure/core-paging": "^1.1.1", @@ -16,11 +18,18 @@ "azure", "typescript", "browser", + "cloud", "isomorphic" ], "license": "MIT", - "main": "./dist/synapse-artifacts.js", - "module": "./esm/index.js", + "engine": { + "node": ">=8.0.0" + }, + "engines": { + "node": ">=8.0.0" + }, + "main": "./dist/index.js", + "module": "./dist-esm/src/index.js", "types": "./types/synapse-artifacts.d.ts", "devDependencies": { "typescript": "~3.9.3", @@ -33,11 +42,6 @@ "uglify-js": "^3.4.9", "@microsoft/api-extractor": "7.7.11" }, - "homepage": "https://github.com/Azure/azure-sdk-for-js", - "repository": { - "type": "git", - "url": "https://github.com/Azure/azure-sdk-for-js.git" - }, "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, @@ -50,7 +54,6 @@ "esm/**/*.js.map", "esm/**/*.d.ts", "esm/**/*.d.ts.map", - "src/**/*.ts", "types/synapse-artifacts.d.ts", "README.md", "rollup.config.js", @@ -58,10 +61,11 @@ ], "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", - "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-artifacts.js.map'\" -o ./dist/synapse-artifacts.min.js ./dist/synapse-artifacts.js", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", "pack": "npm pack 2>&1", "build:test": "echo skip", - "lint": "echo skipped", + "lint": "echo skip", + "test": "echo skip", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md index 54c72fff2076..f6236151ca3b 100644 --- a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md +++ b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md @@ -7025,7 +7025,7 @@ export type ZohoSource = 
TabularSource & { // Warnings were encountered during analysis: // -// src/models/index.ts:16836:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts +// src/models/index.ts:16838:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts // (No @packageDocumentation comment for this package) diff --git a/sdk/synapse/synapse-artifacts/rollup.config.js b/sdk/synapse/synapse-artifacts/rollup.config.js index f18ce9d35045..cf8b131d1c51 100644 --- a/sdk/synapse/synapse-artifacts/rollup.config.js +++ b/sdk/synapse/synapse-artifacts/rollup.config.js @@ -7,11 +7,11 @@ import cjs from "@rollup/plugin-commonjs"; * @type {rollup.RollupFileOptions} */ const config = { - input: "./esm/artifactsClient.js", + input: "./dist-esm/artifactsClient.js", external: ["@azure/core-http", "@azure/core-arm"], output: { - file: "./dist/synapse-artifacts.js", - format: "umd", + file: "./dist/index.js", + format: "cjs", name: "Azure.SynapseArtifacts", sourcemap: true, globals: { diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClient.ts b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts index cc724b6d34d4..6db744d7a122 100644 --- a/sdk/synapse/synapse-artifacts/src/artifactsClient.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; import { LinkedService, diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts index 5fa0a6ad901c..d5215310dfbc 100644 --- a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import * as coreHttp from "@azure/core-http"; import { ArtifactsClientOptionalParams } from "./models"; import { lroPolicy } from "./lro"; diff --git a/sdk/synapse/synapse-artifacts/src/index.ts b/sdk/synapse/synapse-artifacts/src/index.ts index 53ba3a9039ab..e821f6a6a255 100644 --- a/sdk/synapse/synapse-artifacts/src/index.ts +++ b/sdk/synapse/synapse-artifacts/src/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /// export * from "./models"; export { ArtifactsClient } from "./artifactsClient"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts index 0b5d4232590f..587f21b6a9ec 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { LROStrategy, BaseResult, diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts index 62ed188e691e..f0f2736b3859 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { LROStrategy, BaseResult, LROOperationStep, LROSYM } from "./models"; import { OperationSpec } from "@azure/core-http"; import { terminalStates } from "./constants"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/constants.ts b/sdk/synapse/synapse-artifacts/src/lro/constants.ts index b0a9acd375b7..c04d09e73f80 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/constants.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/constants.ts @@ -1 +1,3 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
export const terminalStates = ["succeeded", "failed", "canceled", "cancelled"]; diff --git a/sdk/synapse/synapse-artifacts/src/lro/index.ts b/sdk/synapse/synapse-artifacts/src/lro/index.ts index a2a8e9c0e3a0..e67a5ee6fea2 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/index.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. export { shouldDeserializeLRO } from "./requestUtils"; export { createBodyPollingStrategy } from "./bodyPollingStrategy"; export { terminalStates } from "./constants"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts index cfcfa8efd0a7..29a761e77093 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { BaseResult, LROOperationStep, LROStrategy, LROSYM } from "./models"; import { SendOperationFn } from "./lroPoller"; import { OperationSpec } from "@azure/core-http"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index e686401a5cf9..82d123e2949f 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { RequestPolicy, RequestPolicyOptions, diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts index 72d979bd46a9..ebc20bd63868 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { Poller } from "@azure/core-lro"; import { OperationSpec, diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts index ed0dd9132876..ebc1f1c3f0ff 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/models.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { OperationArguments, OperationSpec, diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts index 9b37277e7b2a..379bcdcf5f63 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/operation.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { BaseResult, LROOperationState, LROOperation, LROSYM } from "./models"; /** diff --git a/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts index 12e48123a60f..b58b641981f3 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { LROStrategy, BaseResult, LROOperationStep } from "./models"; /** diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts index e3289b95905b..a3cd8b9d269b 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { HttpOperationResponse } from "@azure/core-http"; import { terminalStates } from "./constants"; import { LROResponseInfo } from "./models"; diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index e7a93ac658e0..9f6c5e23e798 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; import { LROSYM, LROResponseInfo } from "../lro/models"; diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index 424331700c33..a873ab2f23eb 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; export const LinkedServiceListResponse: coreHttp.CompositeMapper = { diff --git a/sdk/synapse/synapse-artifacts/src/models/parameters.ts b/sdk/synapse/synapse-artifacts/src/models/parameters.ts index 139f22e85a37..592ee559cd05 100644 --- a/sdk/synapse/synapse-artifacts/src/models/parameters.ts +++ b/sdk/synapse/synapse-artifacts/src/models/parameters.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { OperationParameter, OperationURLParameter, diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts index afddce7a118c..3d2f97d9e1df 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts index ba179c0fb658..0795b9e91ac3 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts index 7baff96e4392..42c6b2360da6 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts index 0f7ffebdb852..a250571d1e19 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/index.ts b/sdk/synapse/synapse-artifacts/src/operations/index.ts index 3463c5a1b9bc..680e8760f37f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/index.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. export * from "./linkedService"; export * from "./dataset"; export * from "./pipeline"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts index eb317043cae3..7c51643af23f 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts index ceda490199f1..8016b8281fc3 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts index 5fc6f3e34f0e..cf4935ef07c2 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts index e1b104d1b80e..f58c8c3929e2 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts index 19b249f180de..c3246425091e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts index 6ed3fffd75c4..c7e9f1cf8def 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts index 5096b9fc5557..38a4de821c44 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts index 6de62b75e088..af9671d7357e 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts index cd37c09ae3f0..e226d1d9d8b7 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts index 26c4c7e40f2a..6141640254e7 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts index 3577e40b04e1..cb9f8133a839 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts index 682d9bc7277e..7c01377f73ea 100644 --- a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-artifacts/src/tracing.ts b/sdk/synapse/synapse-artifacts/src/tracing.ts index a72edabac525..36ffdc4f7e26 100644 --- a/sdk/synapse/synapse-artifacts/src/tracing.ts +++ b/sdk/synapse/synapse-artifacts/src/tracing.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { createSpanFunction } from "@azure/core-http"; export const createSpan = createSpanFunction({ diff --git a/sdk/synapse/synapse-artifacts/tsconfig.json b/sdk/synapse/synapse-artifacts/tsconfig.json index 0290d6707a44..d43efedfc9bc 100644 --- a/sdk/synapse/synapse-artifacts/tsconfig.json +++ b/sdk/synapse/synapse-artifacts/tsconfig.json @@ -12,7 +12,7 @@ "preserveConstEnums": true, "lib": ["es6", "dom"], "declaration": true, - "outDir": "./esm", + "outDir": "./dist-esm", "importHelpers": true }, "include": ["./src/**/*.ts"], diff --git a/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md b/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md new file mode 100644 index 000000000000..f3a1bab8f0ee --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0-beta.1 + +- Initial release diff --git a/sdk/synapse/synapse-managed-private-endpoints/api-extractor.json b/sdk/synapse/synapse-managed-private-endpoints/api-extractor.json index 3c5b8d02834e..d9673b6381dd 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/api-extractor.json +++ b/sdk/synapse/synapse-managed-private-endpoints/api-extractor.json @@ -1,6 +1,6 @@ { "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", - "mainEntryPointFilePath": "./esm/index.d.ts", + "mainEntryPointFilePath": "./dist-esm/index.d.ts", "docModel": { "enabled": true }, "apiReport": { "enabled": true, "reportFolder": "./review" }, "dtsRollup": { diff --git a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json index 48301eab6079..ac6207ffb312 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -2,6 +2,8 @@ "name": "@azure/synapse-managed-private-endpoints", "author": "Microsoft Corporation", "description": "A generated SDK for 
ManagedPrivateEndpointsClient.", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-managed-private-endpoints/README.md", + "repository": "github:Azure/azure-sdk-for-js", "sdk-type": "client", "version": "1.0.0-beta.1", "dependencies": { @@ -15,11 +17,18 @@ "azure", "typescript", "browser", + "cloud", "isomorphic" ], "license": "MIT", - "main": "./dist/synapse-managed-private-endpoints.js", - "module": "./esm/index.js", + "engine": { + "node": ">=8.0.0" + }, + "engines": { + "node": ">=8.0.0" + }, + "main": "./dist/index.js", + "module": "./dist-esm/src/index.js", "types": "./types/synapse-managed-private-endpoints.d.ts", "devDependencies": { "typescript": "~3.9.3", @@ -32,11 +41,6 @@ "uglify-js": "^3.4.9", "@microsoft/api-extractor": "7.7.11" }, - "homepage": "https://github.com/Azure/azure-sdk-for-js", - "repository": { - "type": "git", - "url": "https://github.com/Azure/azure-sdk-for-js.git" - }, "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, @@ -49,7 +53,6 @@ "esm/**/*.js.map", "esm/**/*.d.ts", "esm/**/*.d.ts.map", - "src/**/*.ts", "types/synapse-managed-private-endpoints.d.ts", "README.md", "rollup.config.js", @@ -57,10 +60,11 @@ ], "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", - "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-managed-private-endpoints.js.map'\" -o ./dist/synapse-managed-private-endpoints.min.js ./dist/synapse-managed-private-endpoints.js", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", + "test": "echo skip", "build:test": "echo skip", "pack": "npm pack 2>&1", - "lint": "echo skipped", + "lint": "echo skip", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", diff --git 
a/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js index 3c741e71d5d6..938c844ba214 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js +++ b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js @@ -7,11 +7,11 @@ import cjs from "@rollup/plugin-commonjs"; * @type {rollup.RollupFileOptions} */ const config = { - input: "./esm/managedPrivateEndpointsClient.js", + input: "./dist-esm/managedPrivateEndpointsClient.js", external: ["@azure/core-http", "@azure/core-arm"], output: { - file: "./dist/synapse-managed-private-endpoints.js", - format: "umd", + file: "./dist/index.js", + format: "cjs", name: "Azure.SynapseManagedPrivateEndpoints", sourcemap: true, globals: { diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/index.ts index 1aaf83a23b90..b3996bea62b4 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/index.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /// export * from "./models"; export { ManagedPrivateEndpointsClient } from "./managedPrivateEndpointsClient"; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClient.ts b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClient.ts index 1df69973a23f..a473d2ac17f3 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClient.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClient.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import * as coreHttp from "@azure/core-http"; import { ManagedPrivateEndpoints } from "./operations"; import { ManagedPrivateEndpointsClientContext } from "./managedPrivateEndpointsClientContext"; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts index f4bfcc76674a..fa65d3792b4c 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; import { ManagedPrivateEndpointsClientOptionalParams } from "./models"; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts index c8a5ef476bf7..315825b4985c 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; /** diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/models/mappers.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/mappers.ts index b4b7e559f43c..93fb506b8311 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/models/mappers.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/models/mappers.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import * as coreHttp from "@azure/core-http"; export const ManagedPrivateEndpoint: coreHttp.CompositeMapper = { diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/models/parameters.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/parameters.ts index 4ba8a4211224..08ddd10470ab 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/models/parameters.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/models/parameters.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { OperationParameter, OperationURLParameter, diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/operations/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/operations/index.ts index 32ab6933be8c..ca1a4a2ff79d 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/operations/index.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/operations/index.ts @@ -1 +1,3 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. export * from "./managedPrivateEndpoints"; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts index 34b1c7baf6c3..818a7ad579ad 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import { PagedAsyncIterableIterator } from "@azure/core-paging"; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts b/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts index 9820227dc43c..ec5a2da97e89 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { createSpanFunction } from "@azure/core-http"; export const createSpan = createSpanFunction({ diff --git a/sdk/synapse/synapse-managed-private-endpoints/tsconfig.json b/sdk/synapse/synapse-managed-private-endpoints/tsconfig.json index 0290d6707a44..d43efedfc9bc 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/tsconfig.json +++ b/sdk/synapse/synapse-managed-private-endpoints/tsconfig.json @@ -12,7 +12,7 @@ "preserveConstEnums": true, "lib": ["es6", "dom"], "declaration": true, - "outDir": "./esm", + "outDir": "./dist-esm", "importHelpers": true }, "include": ["./src/**/*.ts"], diff --git a/sdk/synapse/synapse-monitoring/CHANGELOG.md b/sdk/synapse/synapse-monitoring/CHANGELOG.md new file mode 100644 index 000000000000..f3a1bab8f0ee --- /dev/null +++ b/sdk/synapse/synapse-monitoring/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0-beta.1 + +- Initial release diff --git a/sdk/synapse/synapse-monitoring/api-extractor.json b/sdk/synapse/synapse-monitoring/api-extractor.json index bee41e4ee6ab..7433f42f5c3f 100644 --- a/sdk/synapse/synapse-monitoring/api-extractor.json +++ b/sdk/synapse/synapse-monitoring/api-extractor.json @@ -1,6 +1,6 @@ { "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", - "mainEntryPointFilePath": "./esm/index.d.ts", + "mainEntryPointFilePath": "./dist-esm/index.d.ts", "docModel": { "enabled": true }, "apiReport": { 
"enabled": true, "reportFolder": "./review" }, "dtsRollup": { diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json index 4f8674337f9c..ec9d2bdf49e4 100644 --- a/sdk/synapse/synapse-monitoring/package.json +++ b/sdk/synapse/synapse-monitoring/package.json @@ -2,6 +2,8 @@ "name": "@azure/synapse-monitoring", "author": "Microsoft Corporation", "description": "A generated SDK for MonitoringClient.", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-monitoring/README.md", + "repository": "github:Azure/azure-sdk-for-js", "sdk-type": "client", "version": "1.0.0-beta.1", "dependencies": { @@ -14,11 +16,18 @@ "azure", "typescript", "browser", + "cloud", "isomorphic" ], "license": "MIT", - "main": "./dist/synapse-monitoring.js", - "module": "./esm/index.js", + "engine": { + "node": ">=8.0.0" + }, + "engines": { + "node": ">=8.0.0" + }, + "main": "./dist/index.js", + "module": "./dist-esm/src/index.js", "types": "./types/synapse-monitoring.d.ts", "devDependencies": { "typescript": "~3.9.3", @@ -31,11 +40,6 @@ "uglify-js": "^3.4.9", "@microsoft/api-extractor": "7.7.11" }, - "homepage": "https://github.com/Azure/azure-sdk-for-js", - "repository": { - "type": "git", - "url": "https://github.com/Azure/azure-sdk-for-js.git" - }, "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, @@ -49,20 +53,20 @@ "esm/**/*.d.ts", "esm/**/*.d.ts.map", "types/synapse-monitoring.d.ts", - "src/**/*.ts", "README.md", "rollup.config.js", "tsconfig.json" ], "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", - "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-monitoring.js.map'\" -o ./dist/synapse-monitoring.min.js ./dist/synapse-monitoring.js", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", "pack": "npm pack 
2>&1", - "build:test": "echo skip", + "build:test": "echo skipped", "lint": "echo skipped", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "test": "echo skipped", "extract-api": "api-extractor run --local" }, "sideEffects": false, diff --git a/sdk/synapse/synapse-monitoring/rollup.config.js b/sdk/synapse/synapse-monitoring/rollup.config.js index 2b76f5847500..4eb6764521d6 100644 --- a/sdk/synapse/synapse-monitoring/rollup.config.js +++ b/sdk/synapse/synapse-monitoring/rollup.config.js @@ -7,11 +7,11 @@ import cjs from "@rollup/plugin-commonjs"; * @type {rollup.RollupFileOptions} */ const config = { - input: "./esm/monitoringClient.js", + input: "./dist-esm/monitoringClient.js", external: ["@azure/core-http", "@azure/core-arm"], output: { - file: "./dist/synapse-monitoring.js", - format: "umd", + file: "./dist/index.js", + format: "cjs", name: "Azure.SynapseMonitoring", sourcemap: true, globals: { diff --git a/sdk/synapse/synapse-monitoring/src/index.ts b/sdk/synapse/synapse-monitoring/src/index.ts index b9b74b67ef61..ddf0f4390553 100644 --- a/sdk/synapse/synapse-monitoring/src/index.ts +++ b/sdk/synapse/synapse-monitoring/src/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. export * from "./models"; export { MonitoringClient } from "./monitoringClient"; export { MonitoringClientContext } from "./monitoringClientContext"; diff --git a/sdk/synapse/synapse-monitoring/src/models/index.ts b/sdk/synapse/synapse-monitoring/src/models/index.ts index 4d28685ede7a..7ae7e94f2172 100644 --- a/sdk/synapse/synapse-monitoring/src/models/index.ts +++ b/sdk/synapse/synapse-monitoring/src/models/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import * as coreHttp from "@azure/core-http"; export interface SparkJobListViewResponse { diff --git a/sdk/synapse/synapse-monitoring/src/models/mappers.ts b/sdk/synapse/synapse-monitoring/src/models/mappers.ts index ec985d064a9c..230f5eeead71 100644 --- a/sdk/synapse/synapse-monitoring/src/models/mappers.ts +++ b/sdk/synapse/synapse-monitoring/src/models/mappers.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; export const SparkJobListViewResponse: coreHttp.CompositeMapper = { diff --git a/sdk/synapse/synapse-monitoring/src/models/parameters.ts b/sdk/synapse/synapse-monitoring/src/models/parameters.ts index 6f239c88c26f..299a2a659930 100644 --- a/sdk/synapse/synapse-monitoring/src/models/parameters.ts +++ b/sdk/synapse/synapse-monitoring/src/models/parameters.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { OperationParameter, OperationURLParameter, diff --git a/sdk/synapse/synapse-monitoring/src/monitoringClient.ts b/sdk/synapse/synapse-monitoring/src/monitoringClient.ts index 5baddaef9293..c1e74c64585d 100644 --- a/sdk/synapse/synapse-monitoring/src/monitoringClient.ts +++ b/sdk/synapse/synapse-monitoring/src/monitoringClient.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; import { Monitoring } from "./operations"; import { MonitoringClientContext } from "./monitoringClientContext"; diff --git a/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts b/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts index 039b98fadeb8..09fe6641e0d1 100644 --- a/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts +++ b/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import * as coreHttp from "@azure/core-http"; import { MonitoringClientOptionalParams } from "./models"; diff --git a/sdk/synapse/synapse-monitoring/src/operations/index.ts b/sdk/synapse/synapse-monitoring/src/operations/index.ts index c3443639fc87..a8ff2c177aec 100644 --- a/sdk/synapse/synapse-monitoring/src/operations/index.ts +++ b/sdk/synapse/synapse-monitoring/src/operations/index.ts @@ -1 +1,3 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. export * from "./monitoring"; diff --git a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts index 7dee9d6b5045..d526105f0c45 100644 --- a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts +++ b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-monitoring/src/tracing.ts b/sdk/synapse/synapse-monitoring/src/tracing.ts index 17451e9c02ae..bf65d560face 100644 --- a/sdk/synapse/synapse-monitoring/src/tracing.ts +++ b/sdk/synapse/synapse-monitoring/src/tracing.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { createSpanFunction } from "@azure/core-http"; export const createSpan = createSpanFunction({ diff --git a/sdk/synapse/synapse-monitoring/tsconfig.json b/sdk/synapse/synapse-monitoring/tsconfig.json index 0290d6707a44..d43efedfc9bc 100644 --- a/sdk/synapse/synapse-monitoring/tsconfig.json +++ b/sdk/synapse/synapse-monitoring/tsconfig.json @@ -12,7 +12,7 @@ "preserveConstEnums": true, "lib": ["es6", "dom"], "declaration": true, - "outDir": "./esm", + "outDir": "./dist-esm", "importHelpers": true }, "include": ["./src/**/*.ts"], diff --git a/sdk/synapse/synapse-spark/CHANGELOG.md b/sdk/synapse/synapse-spark/CHANGELOG.md new file mode 100644 index 000000000000..f3a1bab8f0ee --- /dev/null +++ b/sdk/synapse/synapse-spark/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0-beta.1 + +- Initial release diff --git a/sdk/synapse/synapse-spark/api-extractor.json b/sdk/synapse/synapse-spark/api-extractor.json index 048b504f92e3..b49278775b4e 100644 --- a/sdk/synapse/synapse-spark/api-extractor.json +++ b/sdk/synapse/synapse-spark/api-extractor.json @@ -1,6 +1,6 @@ { "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", - "mainEntryPointFilePath": "./esm/index.d.ts", + "mainEntryPointFilePath": "./dist-esm/index.d.ts", "docModel": { "enabled": true }, "apiReport": { "enabled": true, "reportFolder": "./review" }, "dtsRollup": { diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json index a32a35df5c8e..5c130161725c 100644 --- a/sdk/synapse/synapse-spark/package.json +++ b/sdk/synapse/synapse-spark/package.json @@ -2,6 +2,8 @@ "name": "@azure/synapse-spark", "author": "Microsoft Corporation", "description": "A generated SDK for SparkClient.", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-spark/README.md", + "repository": "github:Azure/azure-sdk-for-js", "sdk-type": "client", "version": "1.0.0-beta.1", "dependencies": { @@ -14,11 
+16,18 @@ "azure", "typescript", "browser", + "cloud", "isomorphic" ], "license": "MIT", - "main": "./dist/synapse-spark.js", - "module": "./esm/index.js", + "engine": { + "node": ">=8.0.0" + }, + "engines": { + "node": ">=8.0.0" + }, + "main": "./dist/index.js", + "module": "./dist-esm/src/index.js", "types": "./types/synapse-spark.d.ts", "devDependencies": { "typescript": "~3.9.3", @@ -31,11 +40,6 @@ "uglify-js": "^3.4.9", "@microsoft/api-extractor": "7.7.11" }, - "homepage": "https://github.com/Azure/azure-sdk-for-js", - "repository": { - "type": "git", - "url": "https://github.com/Azure/azure-sdk-for-js.git" - }, "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, @@ -49,16 +53,16 @@ "esm/**/*.d.ts", "esm/**/*.d.ts.map", "types/synapse-spark.d.ts", - "src/**/*.ts", "README.md", "rollup.config.js", "tsconfig.json" ], "scripts": { "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", - "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/synapse-spark.js.map'\" -o ./dist/synapse-spark.min.js ./dist/synapse-spark.js", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", "pack": "npm pack 2>&1", - "build:test": "echo skip", + "build:test": "echo skipped", + "test": "echo skipped", "lint": "echo skipped", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", diff --git a/sdk/synapse/synapse-spark/rollup.config.js b/sdk/synapse/synapse-spark/rollup.config.js index b2b31142e226..3510554961f9 100644 --- a/sdk/synapse/synapse-spark/rollup.config.js +++ b/sdk/synapse/synapse-spark/rollup.config.js @@ -7,11 +7,11 @@ import cjs from "@rollup/plugin-commonjs"; * @type {rollup.RollupFileOptions} */ const config = { - input: "./esm/sparkClient.js", + input: "./dist-esm/sparkClient.js", external: ["@azure/core-http", "@azure/core-arm"], output: { - file: "./dist/synapse-spark.js", - format: "umd", 
+ file: "./dist/index.js", + format: "cjs", name: "Azure.SynapseSpark", sourcemap: true, globals: { diff --git a/sdk/synapse/synapse-spark/src/index.ts b/sdk/synapse/synapse-spark/src/index.ts index 03e9c45aacdc..1d86e2ab2e56 100644 --- a/sdk/synapse/synapse-spark/src/index.ts +++ b/sdk/synapse/synapse-spark/src/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. export * from "./models"; export { SparkClient } from "./sparkClient"; export { SparkClientContext } from "./sparkClientContext"; diff --git a/sdk/synapse/synapse-spark/src/models/index.ts b/sdk/synapse/synapse-spark/src/models/index.ts index c388e24dc015..944d0ac93474 100644 --- a/sdk/synapse/synapse-spark/src/models/index.ts +++ b/sdk/synapse/synapse-spark/src/models/index.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; /** diff --git a/sdk/synapse/synapse-spark/src/models/mappers.ts b/sdk/synapse/synapse-spark/src/models/mappers.ts index 36d46edc918f..4b82776a4058 100644 --- a/sdk/synapse/synapse-spark/src/models/mappers.ts +++ b/sdk/synapse/synapse-spark/src/models/mappers.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; export const SparkBatchJobCollection: coreHttp.CompositeMapper = { diff --git a/sdk/synapse/synapse-spark/src/models/parameters.ts b/sdk/synapse/synapse-spark/src/models/parameters.ts index 38736a316780..eb4a77d11aab 100644 --- a/sdk/synapse/synapse-spark/src/models/parameters.ts +++ b/sdk/synapse/synapse-spark/src/models/parameters.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { OperationParameter, OperationURLParameter, diff --git a/sdk/synapse/synapse-spark/src/operations/index.ts b/sdk/synapse/synapse-spark/src/operations/index.ts index f9fb9020b68f..e627dd34f3db 100644 --- a/sdk/synapse/synapse-spark/src/operations/index.ts +++ b/sdk/synapse/synapse-spark/src/operations/index.ts @@ -1,2 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. export * from "./sparkBatch"; export * from "./sparkSession"; diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts index 507565abbb04..1fbb4dcf44d5 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts index 2c0d38793843..bc78e424cf71 100644 --- a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts +++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { CanonicalCode } from "@opentelemetry/api"; import { createSpan } from "../tracing"; import * as coreHttp from "@azure/core-http"; diff --git a/sdk/synapse/synapse-spark/src/sparkClient.ts b/sdk/synapse/synapse-spark/src/sparkClient.ts index 3dbe2ecb1898..e456808dcff2 100644 --- a/sdk/synapse/synapse-spark/src/sparkClient.ts +++ b/sdk/synapse/synapse-spark/src/sparkClient.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import * as coreHttp from "@azure/core-http"; import { SparkBatch, SparkSession } from "./operations"; import { SparkClientContext } from "./sparkClientContext"; diff --git a/sdk/synapse/synapse-spark/src/sparkClientContext.ts b/sdk/synapse/synapse-spark/src/sparkClientContext.ts index 72e555659ee5..f77964a1ce6b 100644 --- a/sdk/synapse/synapse-spark/src/sparkClientContext.ts +++ b/sdk/synapse/synapse-spark/src/sparkClientContext.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import * as coreHttp from "@azure/core-http"; import { SparkClientOptionalParams } from "./models"; diff --git a/sdk/synapse/synapse-spark/src/tracing.ts b/sdk/synapse/synapse-spark/src/tracing.ts index bb74ae42133b..08ce4428a831 100644 --- a/sdk/synapse/synapse-spark/src/tracing.ts +++ b/sdk/synapse/synapse-spark/src/tracing.ts @@ -1,3 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. import { createSpanFunction } from "@azure/core-http"; export const createSpan = createSpanFunction({ diff --git a/sdk/synapse/synapse-spark/tsconfig.json b/sdk/synapse/synapse-spark/tsconfig.json index 0290d6707a44..d43efedfc9bc 100644 --- a/sdk/synapse/synapse-spark/tsconfig.json +++ b/sdk/synapse/synapse-spark/tsconfig.json @@ -12,7 +12,7 @@ "preserveConstEnums": true, "lib": ["es6", "dom"], "declaration": true, - "outDir": "./esm", + "outDir": "./dist-esm", "importHelpers": true }, "include": ["./src/**/*.ts"], From af087990ebda760d2b9af9af0d200f301f0efa0e Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Thu, 10 Dec 2020 07:45:45 +1300 Subject: [PATCH 24/28] Add copyright and improve generation --- .../src/models/index.ts | 3 +- .../src/artifactsClientContext.ts | 14 +--- .../src/lro/azureAsyncOperationStrategy.ts | 42 +++-------- .../src/lro/bodyPollingStrategy.ts | 5 +- .../src/lro/locationStrategy.ts | 7 +- .../synapse-artifacts/src/lro/lroPolicy.ts | 8 +-- .../synapse-artifacts/src/lro/lroPoller.ts | 27 ++------ 
.../synapse-artifacts/src/lro/models.ts | 8 +-- .../synapse-artifacts/src/lro/operation.ts | 4 +- .../synapse-artifacts/src/lro/requestUtils.ts | 16 +---- .../synapse-artifacts/src/models/index.ts | 69 ++++++------------- .../synapse-artifacts/src/models/mappers.ts | 46 ++++--------- .../src/models/index.ts | 3 +- .../synapse-monitoring/src/models/index.ts | 9 +-- sdk/synapse/synapse-spark/src/models/index.ts | 21 ++---- .../synapse-spark/src/sparkClientContext.ts | 3 +- 16 files changed, 74 insertions(+), 211 deletions(-) diff --git a/sdk/synapse/synapse-access-control/src/models/index.ts b/sdk/synapse/synapse-access-control/src/models/index.ts index 2810fedee70b..4fa06c5c841a 100644 --- a/sdk/synapse/synapse-access-control/src/models/index.ts +++ b/sdk/synapse/synapse-access-control/src/models/index.ts @@ -271,8 +271,7 @@ export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListRespons /** * Optional parameters. */ -export interface AccessControlClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts index d5215310dfbc..d5df64d397dc 100644 --- a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts +++ b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts @@ -50,17 +50,12 @@ export class ArtifactsClientContext extends coreHttp.ServiceClient { // When an array of factories is passed in, we'll just add the required factories, // in this case lroPolicy(). It is important to note that passing an array of factories // to a new client, bypasses core-http default factories. Just the pipelines provided will be run. 
- options.requestPolicyFactories = [ - lroPolicy(), - ...options.requestPolicyFactories - ]; + options.requestPolicyFactories = [lroPolicy(), ...options.requestPolicyFactories]; } else if (options.requestPolicyFactories) { // When we were passed a requestPolicyFactories as a function, we'll create a new one that adds the factories provided // in the options plus the required policies. When using this path, the pipelines passed to the client will be added to the // default policies added by core-http - const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [ - lroPolicy() - ]; + const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [lroPolicy()]; options.requestPolicyFactories = (defaultFactories) => [ ...optionsPolicies, ...defaultFactories @@ -68,10 +63,7 @@ export class ArtifactsClientContext extends coreHttp.ServiceClient { } else { // In case no request policy factories were provided, we'll just need to create a function that will add // the lroPolicy to the default pipelines added by core-http - options.requestPolicyFactories = (defaultFactories) => [ - lroPolicy(), - ...defaultFactories - ]; + options.requestPolicyFactories = (defaultFactories) => [lroPolicy(), ...defaultFactories]; } super(credentials, options); diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts index 587f21b6a9ec..b1619e36eea0 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -8,11 +8,7 @@ import { FinalStateVia, LROSYM } from "./models"; -import { - OperationSpec, - OperationArguments, - OperationResponse -} from "@azure/core-http"; +import { OperationSpec, OperationArguments, OperationResponse } from "@azure/core-http"; import { terminalStates } from "./constants"; import { SendOperationFn } from "."; @@ -23,14 +19,11 @@ export function 
createAzureAsyncOperationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; - let lastKnownPollingUrl = - lroData.azureAsyncOperation || lroData.operationLocation; + let lastKnownPollingUrl = lroData.azureAsyncOperation || lroData.operationLocation; return { isTerminal: () => { @@ -61,17 +54,12 @@ export function createAzureAsyncOperationStrategy( const initialOperationResult = initialOperation.result._response[LROSYM]; const currentOperationResult = currentOperation.result._response[LROSYM]; - if ( - !shouldPerformFinalGet(initialOperationResult, currentOperationResult) - ) { + if (!shouldPerformFinalGet(initialOperationResult, currentOperationResult)) { return currentOperation; } if (initialOperationResult?.requestMethod === "PUT") { - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); return currentOperation; } @@ -79,29 +67,20 @@ export function createAzureAsyncOperationStrategy( if (initialOperationResult?.location) { switch (finalStateVia) { case "original-uri": - currentOperation = await sendFinalGet( - initialOperation, - sendOperationFn - ); + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); return currentOperation; case "azure-async-operation": return currentOperation; case "location": default: - const location = - initialOperationResult.location || - currentOperationResult?.location; + const location = initialOperationResult.location || currentOperationResult?.location; if (!location) { throw new Error("Couldn't determine final GET URL from location"); } - return await sendFinalGet( - initialOperation, - sendOperationFn, - location - ); + return await 
sendFinalGet(initialOperation, sendOperationFn, location); } } @@ -179,10 +158,7 @@ function getCompositeMappers(responses: { }, {} as { [responseCode: string]: OperationResponse }); } -function shouldPerformFinalGet( - initialResult?: LROResponseInfo, - currentResult?: LROResponseInfo -) { +function shouldPerformFinalGet(initialResult?: LROResponseInfo, currentResult?: LROResponseInfo) { const { status } = currentResult || {}; const { requestMethod: initialRequestMethod, location } = initialResult || {}; if (status && status.toLowerCase() !== "succeeded") { diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts index f0f2736b3859..f5ab0f2b7010 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -46,10 +46,7 @@ export function createBodyPollingStrategy( }; // Execute the polling operation - initialOperation.result = await sendOperation( - initialOperation.args, - pollingSpec - ); + initialOperation.result = await sendOperation(initialOperation.args, pollingSpec); return initialOperation; } }; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts index 29a761e77093..9ed079ccff59 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -10,9 +10,7 @@ export function createLocationStrategy( ): LROStrategy { const lroData = initialOperation.result._response[LROSYM]; if (!lroData) { - throw new Error( - "Expected lroData to be defined for Azure-AsyncOperation strategy" - ); + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); } let currentOperation = initialOperation; @@ -53,8 +51,7 @@ export function createLocationStrategy( const result = await sendOperationFn(pollingArgs, pollingSpec); // Update latest 
polling url - lastKnownPollingUrl = - result._response[LROSYM]?.location || lastKnownPollingUrl; + lastKnownPollingUrl = result._response[LROSYM]?.location || lastKnownPollingUrl; // Update lastOperation result currentOperation = { diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index 82d123e2949f..ae121be22ebb 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -23,12 +23,8 @@ class LROPolicy extends BaseRequestPolicy { super(nextPolicy, options); } - public async sendRequest( - webResource: WebResource - ): Promise { - let result: LROOperationResponse = await this._nextPolicy.sendRequest( - webResource - ); + public async sendRequest(webResource: WebResource): Promise { + let result: LROOperationResponse = await this._nextPolicy.sendRequest(webResource); const _lroData = getLROData(result); result[LROSYM] = _lroData; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts index ebc20bd63868..b66b626cd18c 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -1,19 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
import { Poller } from "@azure/core-lro"; -import { - OperationSpec, - OperationArguments, - delay, - RestError -} from "@azure/core-http"; -import { - BaseResult, - LROOperationState, - LROOperationStep, - FinalStateVia, - LROSYM -} from "./models"; +import { OperationSpec, OperationArguments, delay, RestError } from "@azure/core-http"; +import { BaseResult, LROOperationState, LROOperationStep, FinalStateVia, LROSYM } from "./models"; import { makeOperation } from "./operation"; import { createBodyPollingStrategy } from "./bodyPollingStrategy"; import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; @@ -72,11 +61,7 @@ export class LROPoller extends Poller< result: initialOperationResult }; - const pollingStrategy = getPollingStrategy( - initialOperation, - sendOperation, - finalStateVia - ); + const pollingStrategy = getPollingStrategy(initialOperation, sendOperation, finalStateVia); const state: LROOperationState = { // Initial operation will become the last operation @@ -129,11 +114,7 @@ function getPollingStrategy( } if (lroData.azureAsyncOperation || lroData.operationLocation) { - return createAzureAsyncOperationStrategy( - initialOperation, - sendOperationFn, - finalStateVia - ); + return createAzureAsyncOperationStrategy(initialOperation, sendOperationFn, finalStateVia); } if (lroData.location) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts index ebc1f1c3f0ff..a7b811e1b81b 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/models.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -10,10 +10,7 @@ import { import { PollOperationState, PollOperation } from "@azure/core-lro"; export const LROSYM = Symbol("LROData"); -export type FinalStateVia = - | "azure-async-operation" - | "location" - | "original-uri"; +export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; export interface LROResponseInfo { requestMethod: HttpMethods; @@ -49,8 
+46,7 @@ export interface LROOperationStep { result: TResult; } -export interface LROOperationState - extends PollOperationState { +export interface LROOperationState extends PollOperationState { lastOperation: LROOperationStep; initialOperation: LROOperationStep; pollingStrategy: LROStrategy; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts index 379bcdcf5f63..f35752e2da6b 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/operation.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -43,9 +43,7 @@ async function update( const currentLroData = currentResponse.result._response[LROSYM]; if (!currentLroData) { - throw new Error( - "Expected lroData to be defined for updating LRO operation" - ); + throw new Error("Expected lroData to be defined for updating LRO operation"); } if (state.result) { diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts index a3cd8b9d269b..5e9e3cabdcef 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -23,17 +23,10 @@ export function shouldDeserializeLRO(finalStateVia?: string) { isInitialRequest = false; } - if ( - initialOperationInfo.azureAsyncOperation || - initialOperationInfo.operationLocation - ) { + if (initialOperationInfo.azureAsyncOperation || initialOperationInfo.operationLocation) { return ( !isInitialRequest && - isAsyncOperationFinalResponse( - response, - initialOperationInfo, - finalStateVia - ) + isAsyncOperationFinalResponse(response, initialOperationInfo, finalStateVia) ); } @@ -71,10 +64,7 @@ function isAsyncOperationFinalResponse( return true; } - if ( - initialOperationInfo.requestMethod !== "PUT" && - !initialOperationInfo.location - ) { + if (initialOperationInfo.requestMethod !== "PUT" && !initialOperationInfo.location) { return true; } diff --git 
a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts index 9f6c5e23e798..2f65ec8ff68f 100644 --- a/sdk/synapse/synapse-artifacts/src/models/index.ts +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -209,10 +209,7 @@ export type IntegrationRuntimeUnion = | IntegrationRuntime | ManagedIntegrationRuntime | SelfHostedIntegrationRuntime; -export type SecretBaseUnion = - | SecretBase - | SecureString - | AzureKeyVaultSecretReference; +export type SecretBaseUnion = SecretBase | SecureString | AzureKeyVaultSecretReference; export type DatasetLocationUnion = | DatasetLocation | AzureBlobStorageLocation @@ -264,9 +261,7 @@ export type StoreWriteSettingsUnion = | AzureBlobFSWriteSettings | AzureDataLakeStoreWriteSettings | FileServerWriteSettings; -export type FormatReadSettingsUnion = - | FormatReadSettings - | DelimitedTextReadSettings; +export type FormatReadSettingsUnion = FormatReadSettings | DelimitedTextReadSettings; export type FormatWriteSettingsUnion = | FormatWriteSettings | AvroWriteSettings @@ -3162,12 +3157,7 @@ export interface DatasetStorageFormat { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "TextFormat" - | "JsonFormat" - | "AvroFormat" - | "OrcFormat" - | "ParquetFormat"; + type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. */ @@ -3357,10 +3347,7 @@ export interface FormatWriteSettings { /** * Polymorphic discriminator, which specifies the different types this object can be */ - type: - | "AvroWriteSettings" - | "DelimitedTextWriteSettings" - | "JsonWriteSettings"; + type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; /** * Describes unknown properties. The value of an unknown property can be of "any" type. 
*/ @@ -16236,8 +16223,7 @@ export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceReso /** * Optional parameters. */ -export interface LinkedServiceGetLinkedServiceOptionalParams - extends coreHttp.OperationOptions { +export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions { /** * ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -16307,8 +16293,7 @@ export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { /** * Optional parameters. */ -export interface DatasetCreateOrUpdateDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -16342,8 +16327,7 @@ export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { /** * Optional parameters. */ -export interface DatasetGetDatasetOptionalParams - extends coreHttp.OperationOptions { +export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { /** * ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -16413,8 +16397,7 @@ export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & { /** * Optional parameters. */ -export interface PipelineCreateOrUpdatePipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
*/ @@ -16448,8 +16431,7 @@ export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineGetPipelineOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions { /** * ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -16479,8 +16461,7 @@ export type PipelineGetPipelineResponse = PipelineResource & { /** * Optional parameters. */ -export interface PipelineCreatePipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions { /** * Parameters of the pipeline run. These parameters will be used only if the runId is not specified. */ @@ -16602,8 +16583,7 @@ export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { /** * Optional parameters. */ -export interface PipelineRunCancelPipelineRunOptionalParams - extends coreHttp.OperationOptions { +export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions { /** * If true, cancel all the Child pipelines that are triggered by the current pipeline. */ @@ -16633,8 +16613,7 @@ export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & { /** * Optional parameters. */ -export interface TriggerCreateOrUpdateTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -16668,8 +16647,7 @@ export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & { /** * Optional parameters. 
*/ -export interface TriggerGetTriggerOptionalParams - extends coreHttp.OperationOptions { +export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions { /** * ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -16807,8 +16785,7 @@ export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResp /** * Optional parameters. */ -export interface DataFlowCreateOrUpdateDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -16842,8 +16819,7 @@ export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { /** * Optional parameters. */ -export interface DataFlowGetDataFlowOptionalParams - extends coreHttp.OperationOptions { +export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { /** * ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -17041,8 +17017,7 @@ export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & /** * Optional parameters. */ -export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions { /** * ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -17072,8 +17047,7 @@ export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & { /** * Optional parameters. 
*/ -export interface SqlScriptGetSqlScriptOptionalParams - extends coreHttp.OperationOptions { +export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions { /** * ETag of the sql compute entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -17313,8 +17287,7 @@ export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse /** * Optional parameters. */ -export interface NotebookCreateOrUpdateNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions { /** * ETag of the Note book entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. */ @@ -17348,8 +17321,7 @@ export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { /** * Optional parameters. */ -export interface NotebookGetNotebookOptionalParams - extends coreHttp.OperationOptions { +export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { /** * ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. */ @@ -17590,8 +17562,7 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces /** * Optional parameters. 
*/ -export interface ArtifactsClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts index a873ab2f23eb..ca53b45a33f6 100644 --- a/sdk/synapse/synapse-artifacts/src/models/mappers.ts +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -14755,8 +14755,7 @@ export const TextFormat: coreHttp.CompositeMapper = { className: "TextFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, columnDelimiter: { @@ -14824,8 +14823,7 @@ export const JsonFormat: coreHttp.CompositeMapper = { className: "JsonFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties, filePattern: { @@ -14869,8 +14867,7 @@ export const AvroFormat: coreHttp.CompositeMapper = { className: "AvroFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14884,8 +14881,7 @@ export const OrcFormat: coreHttp.CompositeMapper = { className: "OrcFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - 
DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14899,8 +14895,7 @@ export const ParquetFormat: coreHttp.CompositeMapper = { className: "ParquetFormat", uberParent: "DatasetStorageFormat", additionalProperties: { type: { name: "Object" } }, - polymorphicDiscriminator: - DatasetStorageFormat.type.polymorphicDiscriminator, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, modelProperties: { ...DatasetStorageFormat.type.modelProperties } @@ -14987,8 +14982,7 @@ export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebAnonymousAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties } @@ -15001,8 +14995,7 @@ export const WebBasicAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebBasicAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { ...WebLinkedServiceTypeProperties.type.modelProperties, username: { @@ -15029,8 +15022,7 @@ export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { name: "Composite", className: "WebClientCertificateAuthentication", uberParent: "WebLinkedServiceTypeProperties", - polymorphicDiscriminator: - WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, modelProperties: { 
...WebLinkedServiceTypeProperties.type.modelProperties, pfx: { @@ -17644,9 +17636,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap ...DependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -17658,9 +17648,7 @@ export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMap }, size: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -17679,8 +17667,7 @@ export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeKeyAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, key: { @@ -17700,8 +17687,7 @@ export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper name: "Composite", className: "LinkedIntegrationRuntimeRbacAuthorization", uberParent: "LinkedIntegrationRuntimeType", - polymorphicDiscriminator: - LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, modelProperties: { ...LinkedIntegrationRuntimeType.type.modelProperties, resourceId: { @@ -21056,9 +21042,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper ...TriggerDependencyReference.type.modelProperties, offset: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new 
RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, @@ -21069,9 +21053,7 @@ export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper }, size: { constraints: { - Pattern: new RegExp( - "((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))" - ), + Pattern: new RegExp("((d+).)?(dd):(60|([0-5][0-9])):(60|([0-5][0-9]))"), MaxLength: 15, MinLength: 8 }, diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts index 315825b4985c..b7142b7e34f9 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts @@ -172,8 +172,7 @@ export type ManagedPrivateEndpointsListNextResponse = ManagedPrivateEndpointList /** * Optional parameters. */ -export interface ManagedPrivateEndpointsClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface ManagedPrivateEndpointsClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-monitoring/src/models/index.ts b/sdk/synapse/synapse-monitoring/src/models/index.ts index 7ae7e94f2172..447c3a9d5ba4 100644 --- a/sdk/synapse/synapse-monitoring/src/models/index.ts +++ b/sdk/synapse/synapse-monitoring/src/models/index.ts @@ -32,8 +32,7 @@ export interface SqlQueryStringDataModel { /** * Optional parameters. */ -export interface MonitoringGetSparkJobListOptionalParams - extends coreHttp.OperationOptions { +export interface MonitoringGetSparkJobListOptionalParams extends coreHttp.OperationOptions { /** * Can provide a guid, which is helpful for debugging and to provide better customer support */ @@ -63,8 +62,7 @@ export type MonitoringGetSparkJobListResponse = SparkJobListViewResponse & { /** * Optional parameters. 
*/ -export interface MonitoringGetSqlJobQueryStringOptionalParams - extends coreHttp.OperationOptions { +export interface MonitoringGetSqlJobQueryStringOptionalParams extends coreHttp.OperationOptions { /** * Can provide a guid, which is helpful for debugging and to provide better customer support */ @@ -97,8 +95,7 @@ export type MonitoringGetSqlJobQueryStringResponse = SqlQueryStringDataModel & { /** * Optional parameters. */ -export interface MonitoringClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface MonitoringClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Api Version */ diff --git a/sdk/synapse/synapse-spark/src/models/index.ts b/sdk/synapse/synapse-spark/src/models/index.ts index 944d0ac93474..3bc98153fa09 100644 --- a/sdk/synapse/synapse-spark/src/models/index.ts +++ b/sdk/synapse/synapse-spark/src/models/index.ts @@ -467,8 +467,7 @@ export type SparkStatementLanguageType = string; /** * Optional parameters. */ -export interface SparkBatchGetSparkBatchJobsOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -507,8 +506,7 @@ export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { /** * Optional parameters. */ -export interface SparkBatchCreateSparkBatchJobOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -538,8 +536,7 @@ export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. 
*/ -export interface SparkBatchGetSparkBatchJobOptionalParams - extends coreHttp.OperationOptions { +export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -569,8 +566,7 @@ export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionsOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { /** * Optional param specifying which index the list should begin from. */ @@ -609,8 +605,7 @@ export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { /** * Optional parameters. */ -export interface SparkSessionCreateSparkSessionOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -640,8 +635,7 @@ export type SparkSessionCreateSparkSessionResponse = SparkSession & { /** * Optional parameters. */ -export interface SparkSessionGetSparkSessionOptionalParams - extends coreHttp.OperationOptions { +export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { /** * Optional query param specifying whether detailed response is returned beyond plain livy. */ @@ -751,8 +745,7 @@ export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellatio /** * Optional parameters. */ -export interface SparkClientOptionalParams - extends coreHttp.ServiceClientOptions { +export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { /** * Valid api-version for the request. 
*/ diff --git a/sdk/synapse/synapse-spark/src/sparkClientContext.ts b/sdk/synapse/synapse-spark/src/sparkClientContext.ts index f77964a1ce6b..cb070140f975 100644 --- a/sdk/synapse/synapse-spark/src/sparkClientContext.ts +++ b/sdk/synapse/synapse-spark/src/sparkClientContext.ts @@ -54,8 +54,7 @@ export class SparkClientContext extends coreHttp.ServiceClient { this.requestContentType = "application/json; charset=utf-8"; this.baseUri = - options.endpoint || - "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; + options.endpoint || "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}"; // Parameter assignments this.endpoint = endpoint; From 70921a551c704442a36dfd478033f102129ce1cf Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Thu, 10 Dec 2020 08:12:26 +1300 Subject: [PATCH 25/28] Fix keyvault lint --- sdk/keyvault/keyvault-secrets/package.json | 1 + sdk/synapse/synapse-artifacts/package.json | 2 +- sdk/synapse/synapse-managed-private-endpoints/package.json | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/sdk/keyvault/keyvault-secrets/package.json b/sdk/keyvault/keyvault-secrets/package.json index 928709dbaf73..1004d0301c37 100644 --- a/sdk/keyvault/keyvault-secrets/package.json +++ b/sdk/keyvault/keyvault-secrets/package.json @@ -12,6 +12,7 @@ "azure", "typescript", "browser", + "cloud", "isomorphic", "keyvault" ], diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json index 8644dea6744d..519273b927e9 100644 --- a/sdk/synapse/synapse-artifacts/package.json +++ b/sdk/synapse/synapse-artifacts/package.json @@ -64,7 +64,7 @@ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", "pack": "npm pack 2>&1", "build:test": "echo skip", - "lint": "echo skip", + "lint": "echo skipped", "test": "echo skip", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", diff --git 
a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json index ac6207ffb312..736332b45096 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/package.json +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -64,7 +64,7 @@ "test": "echo skip", "build:test": "echo skip", "pack": "npm pack 2>&1", - "lint": "echo skip", + "lint": "echo skipped", "unit-test:browser": "echo skipped", "unit-test:node": "echo skipped", "unit-test": "npm run unit-test:node && npm run unit-test:browser", From 8274a68f32d932b7ab9aa312494c969dba2b10c5 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Thu, 10 Dec 2020 09:52:09 +1300 Subject: [PATCH 26/28] Export fixes --- .../review/synapse-artifacts.api.md | 300 +++++++++++++++--- sdk/synapse/synapse-artifacts/src/index.ts | 8 + .../synapse-artifacts/src/lro/index.ts | 4 +- .../synapse-artifacts/src/lro/lroPolicy.ts | 2 +- .../synapse-managed-private-endpoints.api.md | 13 +- .../src/index.ts | 1 + .../review/synapse-monitoring.api.md | 11 +- sdk/synapse/synapse-monitoring/src/index.ts | 1 + .../synapse-spark/review/synapse-spark.api.md | 31 +- sdk/synapse/synapse-spark/src/index.ts | 1 + 10 files changed, 306 insertions(+), 66 deletions(-) diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md index f6236151ca3b..5ca1f8d5ad29 100644 --- a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md +++ b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md @@ -169,70 +169,38 @@ export interface ArtifactRenameRequest { // @public (undocumented) export class ArtifactsClient extends ArtifactsClientContext { constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ArtifactsClientOptionalParams); - // Warning: (ae-forgotten-export) The symbol "BigDataPools" needs to be exported by the entry point index.d.ts - 
// // (undocumented) - bigDataPools: BigDataPools; - // Warning: (ae-forgotten-export) The symbol "DataFlow" needs to be exported by the entry point index.d.ts - // + bigDataPools: BigDataPoolsOperation; // (undocumented) - dataFlow: DataFlow_2; - // Warning: (ae-forgotten-export) The symbol "DataFlowDebugSession" needs to be exported by the entry point index.d.ts - // + dataFlow: DataFlowOperation; // (undocumented) - dataFlowDebugSession: DataFlowDebugSession; - // Warning: (ae-forgotten-export) The symbol "Dataset" needs to be exported by the entry point index.d.ts - // + dataFlowDebugSession: DataFlowDebugSessionOperation; // (undocumented) - dataset: Dataset_2; - // Warning: (ae-forgotten-export) The symbol "IntegrationRuntimes" needs to be exported by the entry point index.d.ts - // + dataset: DatasetOperation; // (undocumented) - integrationRuntimes: IntegrationRuntimes; - // Warning: (ae-forgotten-export) The symbol "LinkedService" needs to be exported by the entry point index.d.ts - // + integrationRuntimes: IntegrationRuntimesOperation; // (undocumented) - linkedService: LinkedService_2; - // Warning: (ae-forgotten-export) The symbol "Notebook" needs to be exported by the entry point index.d.ts - // + linkedService: LinkedServiceOperation; // (undocumented) - notebook: Notebook_2; - // Warning: (ae-forgotten-export) The symbol "Pipeline" needs to be exported by the entry point index.d.ts - // + notebook: NotebookOperation; // (undocumented) - pipeline: Pipeline; - // Warning: (ae-forgotten-export) The symbol "PipelineRun" needs to be exported by the entry point index.d.ts - // + pipeline: PipelineOperation; // (undocumented) - pipelineRun: PipelineRun_2; - // Warning: (ae-forgotten-export) The symbol "SparkJobDefinition" needs to be exported by the entry point index.d.ts - // + pipelineRun: PipelineRunOperation; // (undocumented) - sparkJobDefinition: SparkJobDefinition_2; - // Warning: (ae-forgotten-export) The symbol "SqlPools" needs to be exported by 
the entry point index.d.ts - // + sparkJobDefinition: SparkJobDefinitionOperation; // (undocumented) - sqlPools: SqlPools; - // Warning: (ae-forgotten-export) The symbol "SqlScript" needs to be exported by the entry point index.d.ts - // + sqlPools: SqlPoolsOperation; // (undocumented) - sqlScript: SqlScript_2; - // Warning: (ae-forgotten-export) The symbol "Trigger" needs to be exported by the entry point index.d.ts - // + sqlScript: SqlScriptOperation; // (undocumented) - trigger: Trigger_2; - // Warning: (ae-forgotten-export) The symbol "TriggerRun" needs to be exported by the entry point index.d.ts - // + trigger: TriggerOperation; // (undocumented) - triggerRun: TriggerRun_2; - // Warning: (ae-forgotten-export) The symbol "Workspace" needs to be exported by the entry point index.d.ts - // + triggerRun: TriggerRunOperation; // (undocumented) - workspace: Workspace_2; - // Warning: (ae-forgotten-export) The symbol "WorkspaceGitRepoManagement" needs to be exported by the entry point index.d.ts - // + workspace: WorkspaceOperation; // (undocumented) - workspaceGitRepoManagement: WorkspaceGitRepoManagement; + workspaceGitRepoManagement: WorkspaceGitRepoManagementOperation; } // @public (undocumented) @@ -872,6 +840,11 @@ export type AzureTableStorageLinkedService = LinkedService & { encryptedCredential?: string; }; +// @public (undocumented) +export interface BaseResult extends RestResponse { + _response: LROOperationResponse; +} + // @public export interface BigDataPoolReference { referenceName: string; @@ -922,6 +895,13 @@ export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult & { }; }; +// @public +export class BigDataPoolsOperation { + constructor(client: ArtifactsClient); + get(bigDataPoolName: string, options?: coreHttp.OperationOptions): Promise; + list(options?: coreHttp.OperationOptions): Promise; +} + // @public export type BinaryDataset = Dataset & { type: "Binary"; @@ -1453,6 +1433,16 @@ export interface DataFlowDebugSessionInfo { 
timeToLiveInMinutes?: number; } +// @public +export class DataFlowDebugSessionOperation { + constructor(client: ArtifactsClient); + addDataFlow(request: DataFlowDebugPackage, options?: coreHttp.OperationOptions): Promise; + createDataFlowDebugSession(request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions): Promise>; + deleteDataFlowDebugSession(request: DeleteDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions): Promise; + executeCommand(request: DataFlowDebugCommandRequest, options?: coreHttp.OperationOptions): Promise>; + listQueryDataFlowDebugSessionsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + } + // @public export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse & { _response: coreHttp.HttpResponse & { @@ -1517,6 +1507,16 @@ export interface DataFlowListResponse { value: DataFlowResource[]; } +// @public +export class DataFlowOperation { + constructor(client: ArtifactsClient); + createOrUpdateDataFlow(dataFlowName: string, dataFlow: DataFlowResource, options?: DataFlowCreateOrUpdateDataFlowOptionalParams): Promise>; + deleteDataFlow(dataFlowName: string, options?: coreHttp.OperationOptions): Promise>; + getDataFlow(dataFlowName: string, options?: DataFlowGetDataFlowOptionalParams): Promise; + listDataFlowsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + renameDataFlow(dataFlowName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise>; +} + // @public export interface DataFlowReference { [property: string]: any; @@ -1699,6 +1699,16 @@ export interface DatasetLocation { // @public (undocumented) export type DatasetLocationUnion = DatasetLocation | AzureBlobStorageLocation | AzureBlobFSLocation | AzureDataLakeStoreLocation | AmazonS3Location | FileServerLocation | AzureFileStorageLocation | GoogleCloudStorageLocation | FtpServerLocation | SftpLocation | 
HttpServerLocation | HdfsLocation; +// @public +export class DatasetOperation { + constructor(client: ArtifactsClient); + createOrUpdateDataset(datasetName: string, dataset: DatasetResource, options?: DatasetCreateOrUpdateDatasetOptionalParams): Promise>; + deleteDataset(datasetName: string, options?: coreHttp.OperationOptions): Promise>; + getDataset(datasetName: string, options?: DatasetGetDatasetOptionalParams): Promise; + listDatasetsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + renameDataset(datasetName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise>; +} + // @public export interface DatasetReference { parameters?: { @@ -2221,6 +2231,9 @@ export type FilterActivity = Activity & { condition: Expression; }; +// @public (undocumented) +export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; + // @public export type ForEachActivity = Activity & { type: "ForEach"; @@ -2881,6 +2894,13 @@ export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse & { }; }; +// @public +export class IntegrationRuntimesOperation { + constructor(client: ArtifactsClient); + get(integrationRuntimeName: string, options?: coreHttp.OperationOptions): Promise; + list(options?: coreHttp.OperationOptions): Promise; +} + // @public export interface IntegrationRuntimeSsisCatalogInfo { [property: string]: any; @@ -4082,6 +4102,16 @@ export interface LinkedServiceListResponse { value: LinkedServiceResource[]; } +// @public +export class LinkedServiceOperation { + constructor(client: ArtifactsClient); + createOrUpdateLinkedService(linkedServiceName: string, linkedService: LinkedServiceResource, options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams): Promise>; + deleteLinkedService(linkedServiceName: string, options?: coreHttp.OperationOptions): Promise>; + getLinkedService(linkedServiceName: string, options?: LinkedServiceGetLinkedServiceOptionalParams): Promise; + 
listLinkedServicesByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + renameLinkedService(linkedServiceName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise>; +} + // @public export interface LinkedServiceReference { parameters?: { @@ -4114,6 +4144,79 @@ export type LookupActivity = ExecutionActivity & { firstRowOnly?: any; }; +// @public +export type LROOperationResponse = HttpOperationResponse & { + [LROSYM]?: LROResponseInfo; +}; + +// @public (undocumented) +export interface LROOperationState extends PollOperationState { + // (undocumented) + finalStateVia?: FinalStateVia; + // (undocumented) + initialOperation: LROOperationStep; + // (undocumented) + lastOperation: LROOperationStep; + // (undocumented) + pollingStrategy: LROStrategy; +} + +// @public (undocumented) +export interface LROOperationStep { + // (undocumented) + args: OperationArguments; + // (undocumented) + result: TResult; + // (undocumented) + spec: OperationSpec; +} + +// @public (undocumented) +export class LROPoller extends Poller, TResult> { + constructor({ initialOperationArguments, initialOperationResult, initialOperationSpec, sendOperation, finalStateVia, intervalInMs }: LROPollerOptions); + delay(): Promise; + } + +// @public (undocumented) +export interface LROPollerOptions { + finalStateVia?: FinalStateVia; + initialOperationArguments: OperationArguments; + initialOperationResult: TResult; + initialOperationSpec: OperationSpec; + intervalInMs?: number; + sendOperation: SendOperationFn; +} + +// @public (undocumented) +export interface LROResponseInfo { + // (undocumented) + azureAsyncOperation?: string; + // (undocumented) + isInitialRequest?: boolean; + // (undocumented) + location?: string; + // (undocumented) + operationLocation?: string; + // (undocumented) + provisioningState?: string; + // (undocumented) + requestMethod: HttpMethods; + // (undocumented) + status?: string; + // (undocumented) + statusCode: number; 
+} + +// @public (undocumented) +export interface LROStrategy { + // (undocumented) + isTerminal: () => boolean; + // (undocumented) + poll: () => Promise>; + // (undocumented) + sendFinalRequest: () => Promise>; +} + // @public export type MagentoLinkedService = LinkedService & { type: "Magento"; @@ -4489,6 +4592,17 @@ export interface NotebookMetadata { languageInfo?: NotebookLanguageInfo; } +// @public +export class NotebookOperation { + constructor(client: ArtifactsClient); + createOrUpdateNotebook(notebookName: string, notebook: NotebookResource, options?: NotebookCreateOrUpdateNotebookOptionalParams): Promise>; + deleteNotebook(notebookName: string, options?: coreHttp.OperationOptions): Promise>; + getNotebook(notebookName: string, options?: NotebookGetNotebookOptionalParams): Promise; + listNotebooksByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + listNotebookSummaryByWorkSpace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + renameNotebook(notebookName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise>; +} + // @public export type NotebookReferenceType = string; @@ -4861,6 +4975,17 @@ export interface PipelineListResponse { value: PipelineResource[]; } +// @public +export class PipelineOperation { + constructor(client: ArtifactsClient); + createOrUpdatePipeline(pipelineName: string, pipeline: PipelineResource, options?: PipelineCreateOrUpdatePipelineOptionalParams): Promise>; + createPipelineRun(pipelineName: string, options?: PipelineCreatePipelineRunOptionalParams): Promise; + deletePipeline(pipelineName: string, options?: coreHttp.OperationOptions): Promise>; + getPipeline(pipelineName: string, options?: PipelineGetPipelineOptionalParams): Promise; + listPipelinesByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + renamePipeline(pipelineName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise>; +} + // 
@public export interface PipelineReference { name?: string; @@ -4929,6 +5054,15 @@ export interface PipelineRunInvokedBy { readonly name?: string; } +// @public +export class PipelineRunOperation { + constructor(client: ArtifactsClient); + cancelPipelineRun(runId: string, options?: PipelineRunCancelPipelineRunOptionalParams): Promise; + getPipelineRun(runId: string, options?: coreHttp.OperationOptions): Promise; + queryActivityRuns(pipelineName: string, runId: string, filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions): Promise; + queryPipelineRunsByWorkspace(filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions): Promise; +} + // @public export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & { _response: coreHttp.HttpResponse & { @@ -5603,6 +5737,9 @@ export type SelfHostedIntegrationRuntime = IntegrationRuntime & { linkedInfo?: LinkedIntegrationRuntimeTypeUnion; }; +// @public (undocumented) +export type SendOperationFn = (args: OperationArguments, spec: OperationSpec) => Promise; + // @public export type ServiceNowAuthenticationType = string; @@ -5830,6 +5967,18 @@ export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJ }; }; +// @public +export class SparkJobDefinitionOperation { + constructor(client: ArtifactsClient); + createOrUpdateSparkJobDefinition(sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams): Promise; + debugSparkJobDefinition(sparkJobDefinitionAzureResource: SparkJobDefinitionResource, options?: coreHttp.OperationOptions): Promise>; + deleteSparkJobDefinition(sparkJobDefinitionName: string, options?: coreHttp.OperationOptions): Promise; + executeSparkJobDefinition(sparkJobDefinitionName: string, options?: coreHttp.OperationOptions): Promise>; + getSparkJobDefinition(sparkJobDefinitionName: string, options?: 
SparkJobDefinitionGetSparkJobDefinitionOptionalParams): Promise; + listSparkJobDefinitionsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + renameSparkJobDefinition(sparkJobDefinitionName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise>; +} + // @public export type SparkJobDefinitionResource = AzureEntityResource & { properties: SparkJobDefinition; @@ -6075,6 +6224,13 @@ export type SqlPoolsListResponse = SqlPoolInfoListResult & { }; }; +// @public +export class SqlPoolsOperation { + constructor(client: ArtifactsClient); + get(sqlPoolName: string, options?: coreHttp.OperationOptions): Promise; + list(options?: coreHttp.OperationOptions): Promise; +} + // @public export type SqlPoolStoredProcedureActivity = Activity & { type: "SqlPoolStoredProcedure"; @@ -6149,6 +6305,16 @@ export interface SqlScriptMetadata { language?: string; } +// @public +export class SqlScriptOperation { + constructor(client: ArtifactsClient); + createOrUpdateSqlScript(sqlScriptName: string, sqlScript: SqlScriptResource, options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams): Promise; + deleteSqlScript(sqlScriptName: string, options?: coreHttp.OperationOptions): Promise; + getSqlScript(sqlScriptName: string, options?: SqlScriptGetSqlScriptOptionalParams): Promise; + listSqlScriptsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + renameSqlScript(sqlScriptName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise>; +} + // @public export interface SqlScriptResource { readonly etag?: string; @@ -6630,6 +6796,20 @@ export interface TriggerListResponse { value: TriggerResource[]; } +// @public +export class TriggerOperation { + constructor(client: ArtifactsClient); + createOrUpdateTrigger(triggerName: string, trigger: TriggerResource, options?: TriggerCreateOrUpdateTriggerOptionalParams): Promise>; + deleteTrigger(triggerName: string, options?: 
coreHttp.OperationOptions): Promise>; + getEventSubscriptionStatus(triggerName: string, options?: coreHttp.OperationOptions): Promise; + getTrigger(triggerName: string, options?: TriggerGetTriggerOptionalParams): Promise; + listTriggersByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + startTrigger(triggerName: string, options?: coreHttp.OperationOptions): Promise>; + stopTrigger(triggerName: string, options?: coreHttp.OperationOptions): Promise>; + subscribeTriggerToEvents(triggerName: string, options?: coreHttp.OperationOptions): Promise>; + unsubscribeTriggerFromEvents(triggerName: string, options?: coreHttp.OperationOptions): Promise>; +} + // @public export interface TriggerPipelineReference { parameters?: { @@ -6669,6 +6849,14 @@ export interface TriggerRun { readonly triggerType?: string; } +// @public +export class TriggerRunOperation { + constructor(client: ArtifactsClient); + cancelTriggerInstance(triggerName: string, runId: string, options?: coreHttp.OperationOptions): Promise; + queryTriggerRunsByWorkspace(filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions): Promise; + rerunTriggerInstance(triggerName: string, runId: string, options?: coreHttp.OperationOptions): Promise; +} + // @public export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse & { _response: coreHttp.HttpResponse & { @@ -6943,6 +7131,12 @@ export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAcces }; }; +// @public +export class WorkspaceGitRepoManagementOperation { + constructor(client: ArtifactsClient); + getGitHubAccessToken(gitHubAccessTokenRequest: GitHubAccessTokenRequest, options?: WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams): Promise; +} + // @public export interface WorkspaceIdentity { readonly principalId?: string; @@ -6956,6 +7150,12 @@ export interface WorkspaceKeyDetails { name?: string; } +// @public +export class WorkspaceOperation { + constructor(client: 
ArtifactsClient); + get(options?: coreHttp.OperationOptions): Promise; +} + // @public export interface WorkspaceRepositoryConfiguration { accountName?: string; @@ -7023,10 +7223,6 @@ export type ZohoSource = TabularSource & { }; -// Warnings were encountered during analysis: -// -// src/models/index.ts:16838:5 - (ae-forgotten-export) The symbol "LROResponseInfo" needs to be exported by the entry point index.d.ts - // (No @packageDocumentation comment for this package) ``` diff --git a/sdk/synapse/synapse-artifacts/src/index.ts b/sdk/synapse/synapse-artifacts/src/index.ts index e821f6a6a255..be17558e56e9 100644 --- a/sdk/synapse/synapse-artifacts/src/index.ts +++ b/sdk/synapse/synapse-artifacts/src/index.ts @@ -2,5 +2,13 @@ // Licensed under the MIT license. /// export * from "./models"; +export { LROPoller, LROPollerOptions, LROOperationStep, LROStrategy, LROOperationResponse, LROResponseInfo, BaseResult, LROOperationState, FinalStateVia, SendOperationFn } from "./lro"; export { ArtifactsClient } from "./artifactsClient"; export { ArtifactsClientContext } from "./artifactsClientContext"; +export { BigDataPools as BigDataPoolsOperation, DataFlow as DataFlowOperation, + DataFlowDebugSession as DataFlowDebugSessionOperation, Dataset as DatasetOperation, + WorkspaceGitRepoManagement as WorkspaceGitRepoManagementOperation, Workspace as WorkspaceOperation, + TriggerRun as TriggerRunOperation, Trigger as TriggerOperation, SqlScript as SqlScriptOperation, + SqlPools as SqlPoolsOperation, SparkJobDefinition as SparkJobDefinitionOperation, + PipelineRun as PipelineRunOperation, Pipeline as PipelineOperation, Notebook as NotebookOperation, + LinkedService as LinkedServiceOperation, IntegrationRuntimes as IntegrationRuntimesOperation} from "./operations"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/index.ts b/sdk/synapse/synapse-artifacts/src/lro/index.ts index e67a5ee6fea2..ae5da2477850 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/index.ts +++ 
b/sdk/synapse/synapse-artifacts/src/lro/index.ts @@ -11,7 +11,9 @@ export { LROOperationStep, LROOperationState, LROStrategy, - LROOperation + LROOperation, + FinalStateVia, + LROOperationResponse } from "./models"; export { makeOperation } from "./operation"; export * from "./locationStrategy"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts index ae121be22ebb..5e79eb789b2d 100644 --- a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -18,7 +18,7 @@ export function lroPolicy() { }; } -class LROPolicy extends BaseRequestPolicy { +export class LROPolicy extends BaseRequestPolicy { constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) { super(nextPolicy, options); } diff --git a/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-private-endpoints.api.md b/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-private-endpoints.api.md index 01eb0fe7c8c1..cdfedb1dd3f1 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-private-endpoints.api.md +++ b/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-private-endpoints.api.md @@ -40,10 +40,8 @@ export interface ManagedPrivateEndpointProperties { // @public (undocumented) export class ManagedPrivateEndpointsClient extends ManagedPrivateEndpointsClientContext { constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ManagedPrivateEndpointsClientOptionalParams); - // Warning: (ae-forgotten-export) The symbol "ManagedPrivateEndpoints" needs to be exported by the entry point index.d.ts - // // (undocumented) - managedPrivateEndpoints: ManagedPrivateEndpoints; + managedPrivateEndpoints: ManagedPrivateEndpointsOperation; } // @public (undocumented) @@ -93,6 +91,15 @@ export type ManagedPrivateEndpointsListResponse = ManagedPrivateEndpointListResp 
}; }; +// @public +export class ManagedPrivateEndpointsOperation { + constructor(client: ManagedPrivateEndpointsClient); + create(managedVirtualNetworkName: string, managedPrivateEndpointName: string, managedPrivateEndpoint: ManagedPrivateEndpoint, options?: coreHttp.OperationOptions): Promise; + delete(managedVirtualNetworkName: string, managedPrivateEndpointName: string, options?: coreHttp.OperationOptions): Promise; + get(managedVirtualNetworkName: string, managedPrivateEndpointName: string, options?: coreHttp.OperationOptions): Promise; + list(managedVirtualNetworkName: string, options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + } + // (No @packageDocumentation comment for this package) diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/index.ts index b3996bea62b4..cd130f2af113 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/src/index.ts +++ b/sdk/synapse/synapse-managed-private-endpoints/src/index.ts @@ -4,3 +4,4 @@ export * from "./models"; export { ManagedPrivateEndpointsClient } from "./managedPrivateEndpointsClient"; export { ManagedPrivateEndpointsClientContext } from "./managedPrivateEndpointsClientContext"; +export { ManagedPrivateEndpoints as ManagedPrivateEndpointsOperation } from "./operations"; diff --git a/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md b/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md index 239afacafa82..316fee8ccc20 100644 --- a/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md +++ b/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md @@ -9,10 +9,8 @@ import * as coreHttp from '@azure/core-http'; // @public (undocumented) export class MonitoringClient extends MonitoringClientContext { constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: MonitoringClientOptionalParams); - // Warning: (ae-forgotten-export) The 
symbol "Monitoring" needs to be exported by the entry point index.d.ts - // // (undocumented) - monitoring: Monitoring; + monitoring: MonitoringOperation; } // @public (undocumented) @@ -62,6 +60,13 @@ export type MonitoringGetSqlJobQueryStringResponse = SqlQueryStringDataModel & { }; }; +// @public +export class MonitoringOperation { + constructor(client: MonitoringClient); + getSparkJobList(options?: MonitoringGetSparkJobListOptionalParams): Promise; + getSqlJobQueryString(options?: MonitoringGetSqlJobQueryStringOptionalParams): Promise; +} + // @public (undocumented) export interface SparkJob { // (undocumented) diff --git a/sdk/synapse/synapse-monitoring/src/index.ts b/sdk/synapse/synapse-monitoring/src/index.ts index ddf0f4390553..36a55288d039 100644 --- a/sdk/synapse/synapse-monitoring/src/index.ts +++ b/sdk/synapse/synapse-monitoring/src/index.ts @@ -3,3 +3,4 @@ export * from "./models"; export { MonitoringClient } from "./monitoringClient"; export { MonitoringClientContext } from "./monitoringClientContext"; +export { Monitoring as MonitoringOperation } from "./operations"; diff --git a/sdk/synapse/synapse-spark/review/synapse-spark.api.md b/sdk/synapse/synapse-spark/review/synapse-spark.api.md index e1689d19b61d..ce5717326e11 100644 --- a/sdk/synapse/synapse-spark/review/synapse-spark.api.md +++ b/sdk/synapse/synapse-spark/review/synapse-spark.api.md @@ -226,17 +226,22 @@ export interface SparkBatchJobState { terminatedAt?: Date | null; } +// @public +export class SparkBatchOperation { + constructor(client: SparkClient); + cancelSparkBatchJob(batchId: number, options?: coreHttp.OperationOptions): Promise; + createSparkBatchJob(sparkBatchJobOptions: SparkBatchJobOptions, options?: SparkBatchCreateSparkBatchJobOptionalParams): Promise; + getSparkBatchJob(batchId: number, options?: SparkBatchGetSparkBatchJobOptionalParams): Promise; + getSparkBatchJobs(options?: SparkBatchGetSparkBatchJobsOptionalParams): Promise; +} + // @public (undocumented) export class 
SparkClient extends SparkClientContext { constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams); - // Warning: (ae-forgotten-export) The symbol "SparkBatch" needs to be exported by the entry point index.d.ts - // // (undocumented) - sparkBatch: SparkBatch; - // Warning: (ae-forgotten-export) The symbol "SparkSession" needs to be exported by the entry point index.d.ts - // + sparkBatch: SparkBatchOperation; // (undocumented) - sparkSession: SparkSession_2; + sparkSession: SparkSessionOperation; } // @public (undocumented) @@ -458,6 +463,20 @@ export type SparkSessionGetSparkStatementsResponse = SparkStatementCollection & }; }; +// @public +export class SparkSessionOperation { + constructor(client: SparkClient); + cancelSparkSession(sessionId: number, options?: coreHttp.OperationOptions): Promise; + cancelSparkStatement(sessionId: number, statementId: number, options?: coreHttp.OperationOptions): Promise; + createSparkSession(sparkSessionOptions: SparkSessionOptions, options?: SparkSessionCreateSparkSessionOptionalParams): Promise; + createSparkStatement(sessionId: number, sparkStatementOptions: SparkStatementOptions, options?: coreHttp.OperationOptions): Promise; + getSparkSession(sessionId: number, options?: SparkSessionGetSparkSessionOptionalParams): Promise; + getSparkSessions(options?: SparkSessionGetSparkSessionsOptionalParams): Promise; + getSparkStatement(sessionId: number, statementId: number, options?: coreHttp.OperationOptions): Promise; + getSparkStatements(sessionId: number, options?: coreHttp.OperationOptions): Promise; + resetSparkSessionTimeout(sessionId: number, options?: coreHttp.OperationOptions): Promise; +} + // @public (undocumented) export interface SparkSessionOptions { // (undocumented) diff --git a/sdk/synapse/synapse-spark/src/index.ts b/sdk/synapse/synapse-spark/src/index.ts index 1d86e2ab2e56..c0f4231d7111 100644 --- 
a/sdk/synapse/synapse-spark/src/index.ts +++ b/sdk/synapse/synapse-spark/src/index.ts @@ -3,3 +3,4 @@ export * from "./models"; export { SparkClient } from "./sparkClient"; export { SparkClientContext } from "./sparkClientContext"; +export { SparkBatch as SparkBatchOperation, SparkSession as SparkSessionOperation } from "./operations"; From 5bfd6a0f8026c95b5bce72442a217c33ecf785b2 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Thu, 10 Dec 2020 09:52:25 +1300 Subject: [PATCH 27/28] Export fixes --- sdk/synapse/synapse-artifacts/src/index.ts | 38 +++++++++++++++++----- sdk/synapse/synapse-spark/src/index.ts | 5 ++- 2 files changed, 34 insertions(+), 9 deletions(-) diff --git a/sdk/synapse/synapse-artifacts/src/index.ts b/sdk/synapse/synapse-artifacts/src/index.ts index be17558e56e9..57003284ea6c 100644 --- a/sdk/synapse/synapse-artifacts/src/index.ts +++ b/sdk/synapse/synapse-artifacts/src/index.ts @@ -2,13 +2,35 @@ // Licensed under the MIT license. /// export * from "./models"; -export { LROPoller, LROPollerOptions, LROOperationStep, LROStrategy, LROOperationResponse, LROResponseInfo, BaseResult, LROOperationState, FinalStateVia, SendOperationFn } from "./lro"; +export { + LROPoller, + LROPollerOptions, + LROOperationStep, + LROStrategy, + LROOperationResponse, + LROResponseInfo, + BaseResult, + LROOperationState, + FinalStateVia, + SendOperationFn +} from "./lro"; export { ArtifactsClient } from "./artifactsClient"; export { ArtifactsClientContext } from "./artifactsClientContext"; -export { BigDataPools as BigDataPoolsOperation, DataFlow as DataFlowOperation, - DataFlowDebugSession as DataFlowDebugSessionOperation, Dataset as DatasetOperation, - WorkspaceGitRepoManagement as WorkspaceGitRepoManagementOperation, Workspace as WorkspaceOperation, - TriggerRun as TriggerRunOperation, Trigger as TriggerOperation, SqlScript as SqlScriptOperation, - SqlPools as SqlPoolsOperation, SparkJobDefinition as SparkJobDefinitionOperation, - PipelineRun as 
PipelineRunOperation, Pipeline as PipelineOperation, Notebook as NotebookOperation, - LinkedService as LinkedServiceOperation, IntegrationRuntimes as IntegrationRuntimesOperation} from "./operations"; +export { + BigDataPools as BigDataPoolsOperation, + DataFlow as DataFlowOperation, + DataFlowDebugSession as DataFlowDebugSessionOperation, + Dataset as DatasetOperation, + WorkspaceGitRepoManagement as WorkspaceGitRepoManagementOperation, + Workspace as WorkspaceOperation, + TriggerRun as TriggerRunOperation, + Trigger as TriggerOperation, + SqlScript as SqlScriptOperation, + SqlPools as SqlPoolsOperation, + SparkJobDefinition as SparkJobDefinitionOperation, + PipelineRun as PipelineRunOperation, + Pipeline as PipelineOperation, + Notebook as NotebookOperation, + LinkedService as LinkedServiceOperation, + IntegrationRuntimes as IntegrationRuntimesOperation +} from "./operations"; diff --git a/sdk/synapse/synapse-spark/src/index.ts b/sdk/synapse/synapse-spark/src/index.ts index c0f4231d7111..5cc2b60083cf 100644 --- a/sdk/synapse/synapse-spark/src/index.ts +++ b/sdk/synapse/synapse-spark/src/index.ts @@ -3,4 +3,7 @@ export * from "./models"; export { SparkClient } from "./sparkClient"; export { SparkClientContext } from "./sparkClientContext"; -export { SparkBatch as SparkBatchOperation, SparkSession as SparkSessionOperation } from "./operations"; +export { + SparkBatch as SparkBatchOperation, + SparkSession as SparkSessionOperation +} from "./operations"; From d5e978b774cc9b114261cc2de3d89e0c2e67eb34 Mon Sep 17 00:00:00 2001 From: Jonathan Turner Date: Thu, 10 Dec 2020 10:05:50 +1300 Subject: [PATCH 28/28] Fix release dates --- sdk/synapse/synapse-access-control/CHANGELOG.md | 2 +- sdk/synapse/synapse-artifacts/CHANGELOG.md | 2 +- sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md | 2 +- sdk/synapse/synapse-monitoring/CHANGELOG.md | 2 +- sdk/synapse/synapse-spark/CHANGELOG.md | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git 
a/sdk/synapse/synapse-access-control/CHANGELOG.md b/sdk/synapse/synapse-access-control/CHANGELOG.md index f3a1bab8f0ee..106cbca530aa 100644 --- a/sdk/synapse/synapse-access-control/CHANGELOG.md +++ b/sdk/synapse/synapse-access-control/CHANGELOG.md @@ -1,5 +1,5 @@ # Release History -## 1.0.0-beta.1 +## 1.0.0-beta.1 (2020-12-09) - Initial release diff --git a/sdk/synapse/synapse-artifacts/CHANGELOG.md b/sdk/synapse/synapse-artifacts/CHANGELOG.md index f3a1bab8f0ee..106cbca530aa 100644 --- a/sdk/synapse/synapse-artifacts/CHANGELOG.md +++ b/sdk/synapse/synapse-artifacts/CHANGELOG.md @@ -1,5 +1,5 @@ # Release History -## 1.0.0-beta.1 +## 1.0.0-beta.1 (2020-12-09) - Initial release diff --git a/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md b/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md index f3a1bab8f0ee..106cbca530aa 100644 --- a/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md +++ b/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md @@ -1,5 +1,5 @@ # Release History -## 1.0.0-beta.1 +## 1.0.0-beta.1 (2020-12-09) - Initial release diff --git a/sdk/synapse/synapse-monitoring/CHANGELOG.md b/sdk/synapse/synapse-monitoring/CHANGELOG.md index f3a1bab8f0ee..106cbca530aa 100644 --- a/sdk/synapse/synapse-monitoring/CHANGELOG.md +++ b/sdk/synapse/synapse-monitoring/CHANGELOG.md @@ -1,5 +1,5 @@ # Release History -## 1.0.0-beta.1 +## 1.0.0-beta.1 (2020-12-09) - Initial release diff --git a/sdk/synapse/synapse-spark/CHANGELOG.md b/sdk/synapse/synapse-spark/CHANGELOG.md index f3a1bab8f0ee..106cbca530aa 100644 --- a/sdk/synapse/synapse-spark/CHANGELOG.md +++ b/sdk/synapse/synapse-spark/CHANGELOG.md @@ -1,5 +1,5 @@ # Release History -## 1.0.0-beta.1 +## 1.0.0-beta.1 (2020-12-09) - Initial release