diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index 6b810e4068a1..28a84d6d6476 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -46,6 +46,11 @@ dependencies: '@rush-temp/storage-file-share': 'file:projects/storage-file-share.tgz' '@rush-temp/storage-internal-avro': 'file:projects/storage-internal-avro.tgz' '@rush-temp/storage-queue': 'file:projects/storage-queue.tgz' + '@rush-temp/synapse-access-control': 'file:projects/synapse-access-control.tgz' + '@rush-temp/synapse-artifacts': 'file:projects/synapse-artifacts.tgz' + '@rush-temp/synapse-managed-private-endpoints': 'file:projects/synapse-managed-private-endpoints.tgz' + '@rush-temp/synapse-monitoring': 'file:projects/synapse-monitoring.tgz' + '@rush-temp/synapse-spark': 'file:projects/synapse-spark.tgz' '@rush-temp/template': 'file:projects/template.tgz' '@rush-temp/test-utils-perfstress': 'file:projects/test-utils-perfstress.tgz' '@rush-temp/test-utils-recorder': 'file:projects/test-utils-recorder.tgz' @@ -882,6 +887,10 @@ packages: dev: false resolution: integrity: sha512-Agl6xbYP6FOMDeAsr3QVZ+g7Yzg0uhPHWx0j5g4LFdUBHVtqtU+gH660k/lCEe506jJLOGbEzsnqPDTZGJQLag== + /@types/node/14.14.10: + dev: false + resolution: + integrity: sha512-J32dgx2hw8vXrSbu4ZlVhn1Nm3GbeCFNw2FWL8S5QKucHGY0cyNwjdQdO+KMBZ4wpmC7KhLCiNsdk1RFRIYUQQ== /@types/node/8.10.66: dev: false resolution: @@ -1794,6 +1803,12 @@ packages: node: '>=0.10.0' resolution: integrity: sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8= + /builtin-modules/2.0.0: + dev: false + engines: + node: '>=4' + resolution: + integrity: sha512-3U5kUA5VPsRUA3nofm/BXX7GVHKfxz0hOBAPxXrIvHzlDRkQVqEn6yi8QJegxl4LzOHLdvb7XF5dVawa/VVYBg== /builtin-modules/3.1.0: dev: false engines: @@ -6830,6 +6845,15 @@ packages: dev: false resolution: integrity: sha512-xRkB+W/m1KLIzPUmG0ofvR+CPNcvuCuNdjVBVS7ALKSxr3EDhnzNceGkGi1m8MToSli13AzKFYH4ie9w3I5L3g== + /rollup-plugin-node-resolve/3.4.0: + dependencies: + builtin-modules: 2.0.0 + is-module: 1.0.0 + resolve: 1.19.0 + deprecated: This package has been deprecated and is no longer maintained. Please use @rollup/plugin-node-resolve. 
+ dev: false + resolution: + integrity: sha512-PJcd85dxfSBWih84ozRtBkB731OjXk0KnzN0oGp7WOWcarAFkVa71cV5hTJg2qpVsV2U8EUwrzHP3tvy9vS3qg== /rollup-plugin-shim/1.0.0: dev: false resolution: @@ -6896,7 +6920,7 @@ packages: /rollup/1.32.1: dependencies: '@types/estree': 0.0.45 - '@types/node': 8.10.66 + '@types/node': 14.14.10 acorn: 7.4.1 dev: false hasBin: true @@ -9949,7 +9973,7 @@ packages: optionalDependencies: keytar: 5.6.0 resolution: - integrity: sha512-gseNXUeda2sUuauEvkd4xVPlQXbvk3j3ZJTi6FWfC/RgSC3Nw2j26xdxV3lQqTTwipmrOJonnwbQvkvP91nbkQ== + integrity: sha512-2LQ3mAUWaWyF1fnwF68SVXPTfWVJD1Y89swC76mBHMeY+kP7ogTw1ucUf47HnDxS2sp1r+8NH0nOTxocIRKheQ== tarball: 'file:projects/identity.tgz' version: 0.0.0 'file:projects/keyvault-admin.tgz': @@ -10841,6 +10865,81 @@ packages: integrity: sha512-peAeDFKOdGTC2cqKuruSMGA0LCJ2peTAn1UPrf0T+AQQWmhyqJqtMRzdDAbd9gTx/6CGKolRoSfo0Zwtj5apRw== tarball: 'file:projects/storage-queue.tgz' version: 0.0.0 + 'file:projects/synapse-access-control.tgz': + dependencies: + '@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.3 + typescript: 3.9.7 + uglify-js: 3.12.1 + dev: false + name: '@rush-temp/synapse-access-control' + resolution: + integrity: sha512-Fm1Nuq7m7yBN0X3MKPsnHVpDROAxneagP+y+cDn6752i6R8UwFuIk8gZA5AzaTpD5gfuRJl2VOiUncEvnqMVSg== + tarball: 'file:projects/synapse-access-control.tgz' + version: 0.0.0 + 'file:projects/synapse-artifacts.tgz': + dependencies: + '@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.3 + typescript: 3.9.7 + uglify-js: 3.12.1 + dev: false + name: '@rush-temp/synapse-artifacts' + resolution: + integrity: sha512-z1Mzvmykm4QueMmQH8u3rs2Zo3NKDB6DOUDZpbxQd+uBfJkpCLtF3dVeweKusU1kUZh0lwB5CyEIr48QgGm/3w== + tarball: 'file:projects/synapse-artifacts.tgz' + version: 0.0.0 + 'file:projects/synapse-managed-private-endpoints.tgz': + dependencies: + '@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.3 + typescript: 3.9.7 + uglify-js: 3.12.1 + dev: false + name: '@rush-temp/synapse-managed-private-endpoints' + resolution: + integrity: sha512-6aMZRNXwJ7g27eg3aeMp937no7JUghoBSJZ2WnGYJ28mOGii4nNGuTXVad6wVcJc9o5uCsp6hhbjSmzxFszwZQ== + tarball: 'file:projects/synapse-managed-private-endpoints.tgz' + version: 0.0.0 + 'file:projects/synapse-monitoring.tgz': + dependencies: + '@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.3 + typescript: 3.9.7 + uglify-js: 3.12.1 + dev: false + name: '@rush-temp/synapse-monitoring' + resolution: + integrity: sha512-GMO/9OUshXiUwhVVPygWF0/pFk9qLgckZWYgOhYh4locL6PRqTcMLOFeq4TVjmzqDv0Fd94MMcirJMhE4I8tFA== + tarball: 'file:projects/synapse-monitoring.tgz' + version: 0.0.0 + 'file:projects/synapse-spark.tgz': + dependencies: + '@microsoft/api-extractor': 7.7.11 + rollup: 1.32.1 + rollup-plugin-node-resolve: 3.4.0 + rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1 + tslib: 2.0.3 + typescript: 3.9.7 + uglify-js: 3.12.1 + dev: false + name: '@rush-temp/synapse-spark' + resolution: + integrity: sha512-N6icSSXfZHpeeQbCHi/Yo8e+rpyLYkShphRxHr8ftxQFSPin80kDZHn/44rwOwm0Uv9TpAJjlYvs+yzzljT7EA== + tarball: 'file:projects/synapse-spark.tgz' + version: 0.0.0 'file:projects/template.tgz': dependencies: '@azure/core-tracing': 1.0.0-preview.9 @@ -11031,6 
+11130,11 @@ specifiers: '@rush-temp/storage-file-share': 'file:./projects/storage-file-share.tgz' '@rush-temp/storage-internal-avro': 'file:./projects/storage-internal-avro.tgz' '@rush-temp/storage-queue': 'file:./projects/storage-queue.tgz' + '@rush-temp/synapse-access-control': 'file:./projects/synapse-access-control.tgz' + '@rush-temp/synapse-artifacts': 'file:./projects/synapse-artifacts.tgz' + '@rush-temp/synapse-managed-private-endpoints': 'file:./projects/synapse-managed-private-endpoints.tgz' + '@rush-temp/synapse-monitoring': 'file:./projects/synapse-monitoring.tgz' + '@rush-temp/synapse-spark': 'file:./projects/synapse-spark.tgz' '@rush-temp/template': 'file:./projects/template.tgz' '@rush-temp/test-utils-perfstress': 'file:./projects/test-utils-perfstress.tgz' '@rush-temp/test-utils-recorder': 'file:./projects/test-utils-recorder.tgz' diff --git a/dataplane.code-workspace b/dataplane.code-workspace index 7e9d4a5471e2..c8e252d4a12b 100644 --- a/dataplane.code-workspace +++ b/dataplane.code-workspace @@ -132,6 +132,26 @@ "name": "storage-queue", "path": "sdk/storage/storage-queue" }, + { + "name": "synapse-access-control", + "path": "sdk/synapse/synapse-access-control" + }, + { + "name": "synapse-artifacts", + "path": "sdk/synapse/synapse-artifacts" + }, + { + "name": "synapse-managed-private-endpoints", + "path": "sdk/synapse/synapse-managed-private-endpoints" + }, + { + "name": "synapse-monitoring", + "path": "sdk/synapse/synapse-monitoring" + }, + { + "name": "synapse-spark", + "path": "sdk/synapse/synapse-spark" + }, { "name": "tables", "path": "sdk/tables/data-tables" diff --git a/rush.json b/rush.json index 74ce2252da4a..00e6eebf6045 100644 --- a/rush.json +++ b/rush.json @@ -547,6 +547,31 @@ "projectFolder": "sdk/storage/storage-queue", "versionPolicyName": "client" }, + { + "packageName": "@azure/synapse-access-control", + "projectFolder": "sdk/synapse/synapse-access-control", + "versionPolicyName": "client" + }, + { + "packageName": "@azure/synapse-artifacts", + "projectFolder": "sdk/synapse/synapse-artifacts", + "versionPolicyName": "client" + }, + { + "packageName": "@azure/synapse-managed-private-endpoints", + "projectFolder": "sdk/synapse/synapse-managed-private-endpoints", + "versionPolicyName": "client" + }, + { + "packageName": "@azure/synapse-monitoring", + "projectFolder": "sdk/synapse/synapse-monitoring", + "versionPolicyName": "client" + }, + { + "packageName": "@azure/synapse-spark", + "projectFolder": "sdk/synapse/synapse-spark", + "versionPolicyName": "client" + }, { "packageName": "@azure/data-tables", "projectFolder": "sdk/tables/data-tables", diff --git a/sdk/keyvault/keyvault-secrets/package.json b/sdk/keyvault/keyvault-secrets/package.json index 5521947372bb..1004d0301c37 100644 --- a/sdk/keyvault/keyvault-secrets/package.json +++ b/sdk/keyvault/keyvault-secrets/package.json @@ -10,9 +10,9 @@ "keywords": [ "node", "azure", - "cloud", "typescript", "browser", + "cloud", "isomorphic", "keyvault" ], diff --git a/sdk/synapse/ci.yml b/sdk/synapse/ci.yml new file mode 100644 index 000000000000..9e05bbcb6a4b --- /dev/null +++ b/sdk/synapse/ci.yml @@ -0,0 +1,38 @@ +# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file. 
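+# CI and PR builds run only for changes under sdk/synapse/; the five
+# @azure/synapse-* packages declared under Artifacts below are produced
+# as build artifacts by the shared archetype-sdk-client stage template.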
+
+trigger:
+  branches:
+    include:
+      - master
+      - release/*
+      - hotfix/*
+  paths:
+    include:
+      - sdk/synapse/
+
+pr:
+  branches:
+    include:
+      - master
+      - feature/*
+      - release/*
+      - hotfix/*
+  paths:
+    include:
+      - sdk/synapse/
+
+extends:
+  template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml
+  parameters:
+    ServiceDirectory: synapse
+    Artifacts:
+      - name: azure-synapse-access-control
+        safeName: azuresynapseaccesscontrol
+      - name: azure-synapse-artifacts
+        safeName: azuresynapseartifacts
+      - name: azure-synapse-managed-private-endpoints
+        safeName: azuresynapsemanagedprivateendpoints
+      - name: azure-synapse-monitoring
+        safeName: azuresynapsemonitoring
+      - name: azure-synapse-spark
+        safeName: azuresynapsespark
diff --git a/sdk/synapse/synapse-access-control/CHANGELOG.md b/sdk/synapse/synapse-access-control/CHANGELOG.md
new file mode 100644
index 000000000000..106cbca530aa
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/CHANGELOG.md
@@ -0,0 +1,5 @@
+# Release History
+
+## 1.0.0-beta.1 (2020-12-09)
+
+- Initial release
diff --git a/sdk/synapse/synapse-access-control/README.md b/sdk/synapse/synapse-access-control/README.md
new file mode 100644
index 000000000000..f7882010373f
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/README.md
@@ -0,0 +1,64 @@
+## Azure Synapse Access Control client library for JavaScript
+
+This package contains an isomorphic SDK for Access Control.
+
+## Getting started
+
+### Install the package
+
+```bash
+npm install @azure/synapse-access-control
+```
+
+### Currently supported environments
+
+- Node.js version 8.x.x or higher
+- Browser JavaScript
+
+## Key concepts
+
+## Examples
+
+```ts
+import { AccessControlClient } from "@azure/synapse-access-control";
+import { DefaultAzureCredential } from "@azure/identity";
+
+export async function main(): Promise<void> {
+  const credential = new DefaultAzureCredential();
+
+  let client = new AccessControlClient(
+    credential,
+    "https://joturnersynapsetest.dev.azuresynapse.net"
+  );
+  let list = await client.listRoleDefinitions();
+  for await (let item of list) {
+    console.log("item:", item);
+  }
+}
+```
+
+## Related projects
+
+- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js)
+
+![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png)
+
+## Troubleshooting
+
+Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`:
+
+```javascript
+import { setLogLevel } from "@azure/logger";
+
+setLogLevel("info");
+```
+
+## Next steps
+
+In the future, you'll find additional code samples here.
+
+## Contributing
+
+If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code.
+
+![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png)
diff --git a/sdk/synapse/synapse-access-control/api-extractor.json b/sdk/synapse/synapse-access-control/api-extractor.json
new file mode 100644
index 000000000000..3bc7a3670741
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/api-extractor.json
@@ -0,0 +1,18 @@
+{
+  "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
+  "mainEntryPointFilePath": "./dist-esm/index.d.ts",
+  "docModel": { "enabled": true },
+  "apiReport": { "enabled": true, "reportFolder": "./review" },
+  "dtsRollup": {
+    "enabled": true,
+    "untrimmedFilePath": "",
+    "publicTrimmedFilePath": "./types/synapse-access-control.d.ts"
+  },
+  "messages": {
+    "tsdocMessageReporting": { "default": { "logLevel": "none" } },
+    "extractorMessageReporting": {
+      "ae-missing-release-tag": { "logLevel": "none" },
+      "ae-unresolved-link": { "logLevel": "none" }
+    }
+  }
+}
diff --git a/sdk/synapse/synapse-access-control/package.json b/sdk/synapse/synapse-access-control/package.json
new file mode 100644
index 000000000000..9c3631e7d089
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/package.json
@@ -0,0 +1,75 @@
+{
+  "name": "@azure/synapse-access-control",
+  "author": "Microsoft Corporation",
+  "description": "A generated SDK for AccessControlClient.",
+  "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-access-control/README.md",
+  "repository": "github:Azure/azure-sdk-for-js",
+  "sdk-type": "client",
+  "version": "1.0.0-beta.1",
+  "dependencies": {
+    "@azure/core-paging": "^1.1.1",
+    "@azure/core-http": "^1.2.0",
+    "tslib": "^2.0.0"
+  },
+  "keywords": [
+    "node",
+    "azure",
+    "typescript",
+    "browser",
+    "cloud",
+    "isomorphic"
+  ],
+  "license": "MIT",
+  "engine": {
+    "node": ">=8.0.0"
+  },
+  "engines": {
+    "node": ">=8.0.0"
+  },
+  "main": "./dist/index.js",
+  "module": "./dist-esm/src/index.js",
+  "types": "./types/synapse-access-control.d.ts",
+  "devDependencies": {
+    "typescript": "~3.9.3",
+    "eslint": "^6.1.0",
+    "@azure/eslint-plugin-azure-sdk": "^3.0.0",
+    "rollup": "^1.16.3",
+    "rollup-plugin-node-resolve": "^3.4.0",
+    "rollup-plugin-sourcemaps": "^0.4.2",
+    "@rollup/plugin-commonjs": "11.0.2",
+    "uglify-js": "^3.4.9",
+    "@opentelemetry/api": "^0.10.2",
+    "@microsoft/api-extractor": "7.7.11"
+  },
+  "bugs": {
+    "url": "https://github.com/Azure/azure-sdk-for-js/issues"
+  },
+  "files": [
+    "dist/**/*.js",
+    "dist/**/*.js.map",
+    "dist/**/*.d.ts",
+    "dist/**/*.d.ts.map",
+    "dist-esm/**/*.js",
+    "dist-esm/**/*.js.map",
+    "dist-esm/**/*.d.ts",
+    "dist-esm/**/*.d.ts.map",
+    "types/synapse-access-control.d.ts",
+    "README.md",
+    "rollup.config.js",
+    "tsconfig.json"
+  ],
+  "scripts": {
+    "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api",
+    "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js",
+    "pack": "npm pack 2>&1",
+    "test": "echo skip",
+    "build:test": "echo skip",
+    "lint": "echo skip",
+    "unit-test:browser": "echo skipped",
+    "unit-test:node": "echo skipped",
+    "unit-test": "npm run unit-test:node && npm run unit-test:browser",
+    "extract-api": "api-extractor run --local"
+  },
+  "sideEffects": false,
+  "autoPublish": true
+}
diff --git a/sdk/synapse/synapse-access-control/review/synapse-access-control.api.md b/sdk/synapse/synapse-access-control/review/synapse-access-control.api.md
new file mode 100644
index 000000000000..70d8b36d1919
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/review/synapse-access-control.api.md
@@ -0,0 +1,162 @@
+## API Report File for "@azure/synapse-access-control"
+
+> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/).
+
+```ts
+
+import * as coreHttp from '@azure/core-http';
+import { PagedAsyncIterableIterator } from '@azure/core-paging';
+
+// @public (undocumented)
+export class AccessControlClient extends AccessControlClientContext {
+    createRoleAssignment(createRoleAssignmentOptions: CreateRoleAssignmentOptions, options?: coreHttp.OperationOptions): Promise<AccessControlClientCreateRoleAssignmentResponse>;
+    deleteRoleAssignmentById(roleAssignmentId: string, options?: coreHttp.OperationOptions): Promise<coreHttp.RestResponse>;
+    getCallerRoleAssignments(options?: coreHttp.OperationOptions): Promise<AccessControlClientGetCallerRoleAssignmentsResponse>;
+    getRoleAssignmentById(roleAssignmentId: string, options?: coreHttp.OperationOptions): Promise<AccessControlClientGetRoleAssignmentByIdResponse>;
+    getRoleAssignments(options?: AccessControlClientGetRoleAssignmentsOptionalParams): Promise<AccessControlClientGetRoleAssignmentsResponse>;
+    getRoleDefinitionById(roleId: string, options?: coreHttp.OperationOptions): Promise<AccessControlClientGetRoleDefinitionByIdResponse>;
+    listRoleDefinitions(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator<SynapseRole>;
+}
+
+// @public (undocumented)
+export class AccessControlClientContext extends coreHttp.ServiceClient {
+    constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: AccessControlClientOptionalParams);
+    // (undocumented)
+    apiVersion: string;
+    // (undocumented)
+    endpoint: string;
+}
+
+// @public
+export type AccessControlClientCreateRoleAssignmentResponse = RoleAssignmentDetails & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: RoleAssignmentDetails;
+    };
+};
+
+// @public
+export type AccessControlClientGetCallerRoleAssignmentsResponse = {
+    body: string[];
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: string[];
+    };
+};
+
+// @public
+export type AccessControlClientGetRoleAssignmentByIdResponse = RoleAssignmentDetails & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: RoleAssignmentDetails;
+    };
+};
+
+// @public
+export interface AccessControlClientGetRoleAssignmentsHeaders {
+    xMsContinuation?: string;
+}
+
+// @public
+export interface AccessControlClientGetRoleAssignmentsOptionalParams extends coreHttp.OperationOptions {
+    continuationToken?: string;
+    principalId?: string;
+    roleId?: string;
+}
+
+// @public
+export type AccessControlClientGetRoleAssignmentsResponse = AccessControlClientGetRoleAssignmentsHeaders & RoleAssignmentDetails[] & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: RoleAssignmentDetails[];
+        parsedHeaders: AccessControlClientGetRoleAssignmentsHeaders;
+    };
+};
+
+// @public
+export type AccessControlClientGetRoleDefinitionByIdResponse = SynapseRole & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SynapseRole;
+    };
+};
+
+// @public
+export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: RolesListResponse;
+    };
+};
+
+// @public
+export type AccessControlClientGetRoleDefinitionsResponse = RolesListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: RolesListResponse;
+    };
+};
+
+// @public
+export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions {
+    apiVersion?: string;
+    endpoint?: string;
+}
+
+// @public
+export interface CreateRoleAssignmentOptions {
+    principalId: string;
+    roleId: string;
+}
+
+// @public
+export interface ErrorContract {
+    error?: ErrorResponse;
+}
+
+// @public (undocumented)
+export interface ErrorDetail {
+    // (undocumented)
+    code: string;
+    // (undocumented)
+    message: string;
+    // (undocumented)
+    target?: string;
+}
+
+// @public (undocumented)
+export interface ErrorResponse {
+    // (undocumented)
+    code: string;
+    // (undocumented)
+    details?: ErrorDetail[];
+    // (undocumented)
+    message: string;
+    // (undocumented)
+    target?: string;
+}
+
+// @public
+export interface RoleAssignmentDetails {
+    id?: string;
+    principalId?: string;
+    roleId?: string;
+}
+
+// @public
+export interface RolesListResponse {
+    nextLink?: string;
+    value: SynapseRole[];
+}
+
+// @public
+export interface SynapseRole {
+    id?: string;
+    isBuiltIn: boolean;
+    name?: string;
+}
+
+
+// (No @packageDocumentation comment for this package)
+
+```
diff --git a/sdk/synapse/synapse-access-control/rollup.config.js b/sdk/synapse/synapse-access-control/rollup.config.js
new file mode 100644
index 000000000000..d5699103e5d1
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/rollup.config.js
@@ -0,0 +1,41 @@
+import rollup from "rollup";
+import nodeResolve from "rollup-plugin-node-resolve";
+import sourcemaps from "rollup-plugin-sourcemaps";
+import cjs from "@rollup/plugin-commonjs";
+
+/**
+ * @type {rollup.RollupFileOptions}
+ */
+const config = {
+  input: "./dist-esm/accessControlClient.js",
+  external: ["@azure/core-http", "@azure/core-arm"],
+  output: {
+    file: "./dist/index.js",
+    format: "cjs",
+    name: "Azure.SynapseAccessControl",
+    sourcemap: true,
+    globals: {
+      "@azure/core-http": "coreHttp",
+      "@azure/core-arm": "coreArm"
+    },
+    banner: `/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */ `
+  },
+  plugins: [
+    nodeResolve({ module: true }),
+    sourcemaps(),
+    cjs({
+      namedExports: {
+        assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"],
+        "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"]
+      }
+    })
+  ]
+};
+
+export default config;
diff --git a/sdk/synapse/synapse-access-control/src/accessControlClient.ts b/sdk/synapse/synapse-access-control/src/accessControlClient.ts
new file mode 100644
index 000000000000..9ce41622d5e6
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/src/accessControlClient.ts
@@ -0,0 +1,472 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
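+
+// The operation specifications below describe each Synapse role-based access
+// control REST endpoint (roles and role assignments); the AccessControlClient
+// class later in this file wraps them in traced, strongly typed methods.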
+import * as coreHttp from "@azure/core-http";
+import { PagedAsyncIterableIterator } from "@azure/core-paging";
+import { CanonicalCode } from "@opentelemetry/api";
+import { createSpan } from "./tracing";
+import * as Parameters from "./models/parameters";
+import * as Mappers from "./models/mappers";
+import { AccessControlClientContext } from "./accessControlClientContext";
+import {
+  SynapseRole,
+  AccessControlClientGetRoleDefinitionsResponse,
+  AccessControlClientGetRoleDefinitionByIdResponse,
+  CreateRoleAssignmentOptions,
+  AccessControlClientCreateRoleAssignmentResponse,
+  AccessControlClientGetRoleAssignmentsOptionalParams,
+  AccessControlClientGetRoleAssignmentsResponse,
+  AccessControlClientGetRoleAssignmentByIdResponse,
+  AccessControlClientGetCallerRoleAssignmentsResponse,
+  AccessControlClientGetRoleDefinitionsNextResponse
+} from "./models";
+
+// Operation Specifications
+
+const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
+
+const getRoleDefinitionsOperationSpec: coreHttp.OperationSpec = {
+  path: "/rbac/roles",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.RolesListResponse
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const getRoleDefinitionByIdOperationSpec: coreHttp.OperationSpec = {
+  path: "/rbac/roles/{roleId}",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SynapseRole
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint, Parameters.roleId],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const createRoleAssignmentOperationSpec: coreHttp.OperationSpec = {
+  path: "/rbac/roleAssignments",
+  httpMethod: "POST",
+  responses: {
+    200: {
+      bodyMapper: Mappers.RoleAssignmentDetails
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  requestBody: Parameters.createRoleAssignmentOptions,
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint],
+  headerParameters: [Parameters.accept, Parameters.contentType],
+  mediaType: "json",
+  serializer
+};
+const getRoleAssignmentsOperationSpec: coreHttp.OperationSpec = {
+  path: "/rbac/roleAssignments",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: {
+        type: {
+          name: "Sequence",
+          element: {
+            type: { name: "Composite", className: "RoleAssignmentDetails" }
+          }
+        }
+      },
+      headersMapper: Mappers.AccessControlClientGetRoleAssignmentsHeaders
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion, Parameters.roleId1, Parameters.principalId],
+  urlParameters: [Parameters.endpoint],
+  headerParameters: [Parameters.accept, Parameters.continuationToken],
+  serializer
+};
+const getRoleAssignmentByIdOperationSpec: coreHttp.OperationSpec = {
+  path: "/rbac/roleAssignments/{roleAssignmentId}",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.RoleAssignmentDetails
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint, Parameters.roleAssignmentId],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const deleteRoleAssignmentByIdOperationSpec: coreHttp.OperationSpec = {
+  path: "/rbac/roleAssignments/{roleAssignmentId}",
+  httpMethod: "DELETE",
+  responses: {
+    200: {},
+    204: {},
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint, Parameters.roleAssignmentId],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const getCallerRoleAssignmentsOperationSpec: coreHttp.OperationSpec = {
+  path: "/rbac/getMyAssignedRoles",
+  httpMethod: "POST",
+  responses: {
+    200: {
+      bodyMapper: {
+        type: { name: "Sequence", element: { type: { name: "String" } } }
+      }
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const getRoleDefinitionsNextOperationSpec: coreHttp.OperationSpec = {
+  path: "{nextLink}",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.RolesListResponse
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint, Parameters.nextLink],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+
+export class AccessControlClient extends AccessControlClientContext {
+  /**
+   * List roles.
+   * @param options The options parameters.
+   */
+  public listRoleDefinitions(
+    options?: coreHttp.OperationOptions
+  ): PagedAsyncIterableIterator<SynapseRole> {
+    const iter = this.getRoleDefinitionsPagingAll(options);
+    return {
+      next() {
+        return iter.next();
+      },
+      [Symbol.asyncIterator]() {
+        return this;
+      },
+      byPage: () => {
+        return this.getRoleDefinitionsPagingPage(options);
+      }
+    };
+  }
+
+  private async *getRoleDefinitionsPagingPage(
+    options?: coreHttp.OperationOptions
+  ): AsyncIterableIterator<SynapseRole[]> {
+    let result = await this._getRoleDefinitions(options);
+    yield result.value || [];
+    let continuationToken = result.nextLink;
+    while (continuationToken) {
+      result = await this._getRoleDefinitionsNext(continuationToken, options);
+      continuationToken = result.nextLink;
+      yield result.value || [];
+    }
+  }
+
+  private async *getRoleDefinitionsPagingAll(
+    options?: coreHttp.OperationOptions
+  ): AsyncIterableIterator<SynapseRole> {
+    for await (const page of this.getRoleDefinitionsPagingPage(options)) {
+      yield* page;
+    }
+  }
+
+  /**
+   * List roles.
+   * @param options The options parameters.
+   */
+  private async _getRoleDefinitions(
+    options?: coreHttp.OperationOptions
+  ): Promise<AccessControlClientGetRoleDefinitionsResponse> {
+    const { span, updatedOptions } = createSpan(
+      "AccessControlClient-_getRoleDefinitions",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      options: updatedOptions
+    };
+    try {
+      const result = await this.sendOperationRequest(
+        operationArguments,
+        getRoleDefinitionsOperationSpec
+      );
+      return result as AccessControlClientGetRoleDefinitionsResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Get role by role Id.
+   * @param roleId Synapse Built-In Role Id.
+   * @param options The options parameters.
+   */
+  async getRoleDefinitionById(
+    roleId: string,
+    options?: coreHttp.OperationOptions
+  ): Promise<AccessControlClientGetRoleDefinitionByIdResponse> {
+    const { span, updatedOptions } = createSpan(
+      "AccessControlClient-getRoleDefinitionById",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      roleId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.sendOperationRequest(
+        operationArguments,
+        getRoleDefinitionByIdOperationSpec
+      );
+      return result as AccessControlClientGetRoleDefinitionByIdResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Create role assignment.
+   * @param createRoleAssignmentOptions Details of role id and object id.
+   * @param options The options parameters.
+   */
+  async createRoleAssignment(
+    createRoleAssignmentOptions: CreateRoleAssignmentOptions,
+    options?: coreHttp.OperationOptions
+  ): Promise<AccessControlClientCreateRoleAssignmentResponse> {
+    const { span, updatedOptions } = createSpan(
+      "AccessControlClient-createRoleAssignment",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      createRoleAssignmentOptions,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.sendOperationRequest(
+        operationArguments,
+        createRoleAssignmentOperationSpec
+      );
+      return result as AccessControlClientCreateRoleAssignmentResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * List role assignments.
+   * @param options The options parameters.
+   */
+  async getRoleAssignments(
+    options?: AccessControlClientGetRoleAssignmentsOptionalParams
+  ): Promise<AccessControlClientGetRoleAssignmentsResponse> {
+    const { span, updatedOptions } = createSpan(
+      "AccessControlClient-getRoleAssignments",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      options: updatedOptions
+    };
+    try {
+      const result = await this.sendOperationRequest(
+        operationArguments,
+        getRoleAssignmentsOperationSpec
+      );
+      return result as AccessControlClientGetRoleAssignmentsResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Get role assignment by role assignment Id.
+   * @param roleAssignmentId The ID of the role assignment.
+   * @param options The options parameters.
+   */
+  async getRoleAssignmentById(
+    roleAssignmentId: string,
+    options?: coreHttp.OperationOptions
+  ): Promise<AccessControlClientGetRoleAssignmentByIdResponse> {
+    const { span, updatedOptions } = createSpan(
+      "AccessControlClient-getRoleAssignmentById",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      roleAssignmentId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.sendOperationRequest(
+        operationArguments,
+        getRoleAssignmentByIdOperationSpec
+      );
+      return result as AccessControlClientGetRoleAssignmentByIdResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Delete role assignment by role assignment Id.
+   * @param roleAssignmentId The ID of the role assignment.
+   * @param options The options parameters.
+   */
+  async deleteRoleAssignmentById(
+    roleAssignmentId: string,
+    options?: coreHttp.OperationOptions
+  ): Promise<coreHttp.RestResponse> {
+    const { span, updatedOptions } = createSpan(
+      "AccessControlClient-deleteRoleAssignmentById",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      roleAssignmentId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.sendOperationRequest(
+        operationArguments,
+        deleteRoleAssignmentByIdOperationSpec
+      );
+      return result as coreHttp.RestResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * List role assignments of the caller.
+   * @param options The options parameters.
+   */
+  async getCallerRoleAssignments(
+    options?: coreHttp.OperationOptions
+  ): Promise<AccessControlClientGetCallerRoleAssignmentsResponse> {
+    const { span, updatedOptions } = createSpan(
+      "AccessControlClient-getCallerRoleAssignments",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      options: updatedOptions
+    };
+    try {
+      const result = await this.sendOperationRequest(
+        operationArguments,
+        getCallerRoleAssignmentsOperationSpec
+      );
+      return result as AccessControlClientGetCallerRoleAssignmentsResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * GetRoleDefinitionsNext
+   * @param nextLink The nextLink from the previous successful call to the GetRoleDefinitions method.
+   * @param options The options parameters.
+   */
+  private async _getRoleDefinitionsNext(
+    nextLink: string,
+    options?: coreHttp.OperationOptions
+  ): Promise<AccessControlClientGetRoleDefinitionsNextResponse> {
+    const { span, updatedOptions } = createSpan(
+      "AccessControlClient-_getRoleDefinitionsNext",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      nextLink,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.sendOperationRequest(
+        operationArguments,
+        getRoleDefinitionsNextOperationSpec
+      );
+      return result as AccessControlClientGetRoleDefinitionsNextResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+}
diff --git a/sdk/synapse/synapse-access-control/src/accessControlClientContext.ts b/sdk/synapse/synapse-access-control/src/accessControlClientContext.ts
new file mode 100644
index 000000000000..191b933fe6a6
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/src/accessControlClientContext.ts
@@ -0,0 +1,58 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import * as coreHttp from "@azure/core-http";
+import { AccessControlClientOptionalParams } from "./models";
+
+const packageName = "@azure/synapse-access-control";
+const packageVersion = "1.0.0";
+
+export class AccessControlClientContext extends coreHttp.ServiceClient {
+  endpoint: string;
+  apiVersion: string;
+
+  /**
+   * Initializes a new instance of the AccessControlClientContext class.
+   * @param credentials Subscription credentials which uniquely identify client subscription.
+   * @param endpoint The workspace development endpoint, for example
+   * https://myworkspace.dev.azuresynapse.net.
+   * @param options The parameter options
+   */
+  constructor(
+    credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials,
+    endpoint: string,
+    options?: AccessControlClientOptionalParams
+  ) {
+    if (credentials === undefined) {
+      throw new Error("'credentials' cannot be null");
+    }
+    if (endpoint === undefined) {
+      throw new Error("'endpoint' cannot be null");
+    }
+
+    // Initializing default values for options
+    if (!options) {
+      options = {};
+    }
+
+    if (!options.userAgent) {
+      const defaultUserAgent = coreHttp.getDefaultUserAgentValue();
+      options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;
+    }
+
+    if (!options.credentialScopes) {
+      options.credentialScopes = ["https://dev.azuresynapse.net/.default"];
+    }
+
+    super(credentials, options);
+
+    this.requestContentType = "application/json; charset=utf-8";
+
+    this.baseUri = options.endpoint || "{endpoint}";
+
+    // Parameter assignments
+    this.endpoint = endpoint;
+
+    // Assigning values to Constant parameters
+    this.apiVersion = options.apiVersion || "2020-02-01-preview";
+  }
+}
diff --git a/sdk/synapse/synapse-access-control/src/index.ts b/sdk/synapse/synapse-access-control/src/index.ts
new file mode 100644
index 000000000000..21d9a7056568
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/src/index.ts
@@ -0,0 +1,6 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+/// <reference lib="esnext.asynciterable" />
+export * from "./models";
+export { AccessControlClient } from "./accessControlClient";
+export { AccessControlClientContext } from "./accessControlClientContext";
diff --git a/sdk/synapse/synapse-access-control/src/models/index.ts b/sdk/synapse/synapse-access-control/src/models/index.ts
new file mode 100644
index 000000000000..4fa06c5c841a
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/src/models/index.ts
@@ -0,0 +1,283 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import * as coreHttp from "@azure/core-http";
+
+/**
+ * A list of Synapse roles available.
+ */
+export interface RolesListResponse {
+  /**
+   * List of Synapse roles.
+   */
+  value: SynapseRole[];
+  /**
+   * The link to the next page of results, if any remaining results exist.
+   */
+  nextLink?: string;
+}
+
+/**
+ * Synapse role details
+ */
+export interface SynapseRole {
+  /**
+   * Role ID
+   */
+  id?: string;
+  /**
+   * Name of the Synapse role
+   */
+  name?: string;
+  /**
+   * Is a built-in role or not
+   */
+  isBuiltIn: boolean;
+}
+
+/**
+ * Contains details when the response code indicates an error.
+ */
+export interface ErrorContract {
+  /**
+   * The error details.
+   */
+  error?: ErrorResponse;
+}
+
+export interface ErrorResponse {
+  code: string;
+  message: string;
+  target?: string;
+  details?: ErrorDetail[];
+}
+
+export interface ErrorDetail {
+  code: string;
+  message: string;
+  target?: string;
+}
+
+/**
+ * Role Assignment request details
+ */
+export interface CreateRoleAssignmentOptions {
+  /**
+   * Role ID of the Synapse Built-In Role
+   */
+  roleId: string;
+  /**
+   * Object ID of the AAD principal or security-group
+   */
+  principalId: string;
+}
+
+/**
+ * Role Assignment response details
+ */
+export interface RoleAssignmentDetails {
+  /**
+   * Role Assignment ID
+   */
+  id?: string;
+  /**
+   * Role ID of the Synapse Built-In Role
+   */
+  roleId?: string;
+  /**
+   * Object ID of the AAD principal or security-group
+   */
+  principalId?: string;
+}
+
+/**
+ * Defines headers for AccessControlClient_getRoleAssignments operation.
+ */
+export interface AccessControlClientGetRoleAssignmentsHeaders {
+  /**
+   * If the number of role assignments to be listed exceeds the maxResults limit, a continuation
+   * token is returned in this response header. When a continuation token is returned in the
+   * response, it must be specified in a subsequent invocation of the list operation to continue
+   * listing the role assignments.
+   */
+  xMsContinuation?: string;
+}
+
+/**
+ * Contains response data for the getRoleDefinitions operation.
+ */
+export type AccessControlClientGetRoleDefinitionsResponse = RolesListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: RolesListResponse;
+  };
+};
+
+/**
+ * Contains response data for the getRoleDefinitionById operation.
+ */
+export type AccessControlClientGetRoleDefinitionByIdResponse = SynapseRole & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SynapseRole;
+  };
+};
+
+/**
+ * Contains response data for the createRoleAssignment operation.
+ */
+export type AccessControlClientCreateRoleAssignmentResponse = RoleAssignmentDetails & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: RoleAssignmentDetails;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface AccessControlClientGetRoleAssignmentsOptionalParams
+  extends coreHttp.OperationOptions {
+  /**
+   * Synapse Built-In Role Id.
+   */
+  roleId?: string;
+  /**
+   * Object ID of the AAD principal or security-group.
+   */
+  principalId?: string;
+  /**
+   * Continuation token.
+   */
+  continuationToken?: string;
+}
+
+/**
+ * Contains response data for the getRoleAssignments operation.
+ */
+export type AccessControlClientGetRoleAssignmentsResponse = AccessControlClientGetRoleAssignmentsHeaders &
+  RoleAssignmentDetails[] & {
+    /**
+     * The underlying HTTP response.
+     */
+    _response: coreHttp.HttpResponse & {
+      /**
+       * The response body as text (string format)
+       */
+      bodyAsText: string;
+
+      /**
+       * The response body as parsed JSON or XML
+       */
+      parsedBody: RoleAssignmentDetails[];
+      /**
+       * The parsed HTTP response headers.
+       */
+      parsedHeaders: AccessControlClientGetRoleAssignmentsHeaders;
+    };
+  };
+
+/**
+ * Contains response data for the getRoleAssignmentById operation.
+ */
+export type AccessControlClientGetRoleAssignmentByIdResponse = RoleAssignmentDetails & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: RoleAssignmentDetails;
+  };
+};
+
+/**
+ * Contains response data for the getCallerRoleAssignments operation.
+ */
+export type AccessControlClientGetCallerRoleAssignmentsResponse = {
+  /**
+   * The parsed response body.
+   */
+  body: string[];
+
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: string[];
+  };
+};
+
+/**
+ * Contains response data for the getRoleDefinitionsNext operation.
+ */
+export type AccessControlClientGetRoleDefinitionsNextResponse = RolesListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: RolesListResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface AccessControlClientOptionalParams extends coreHttp.ServiceClientOptions {
+  /**
+   * Api Version
+   */
+  apiVersion?: string;
+  /**
+   * Overrides client endpoint.
+   */
+  endpoint?: string;
+}
diff --git a/sdk/synapse/synapse-access-control/src/models/mappers.ts b/sdk/synapse/synapse-access-control/src/models/mappers.ts
new file mode 100644
index 000000000000..18f6ab27e9c9
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/src/models/mappers.ts
@@ -0,0 +1,210 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import * as coreHttp from "@azure/core-http";
+
+export const RolesListResponse: coreHttp.CompositeMapper = {
+  type: {
+    name: "Composite",
+    className: "RolesListResponse",
+    modelProperties: {
+      value: {
+        serializedName: "value",
+        required: true,
+        type: {
+          name: "Sequence",
+          element: {
+            type: {
+              name: "Composite",
+              className: "SynapseRole"
+            }
+          }
+        }
+      },
+      nextLink: {
+        serializedName: "nextLink",
+        type: {
+          name: "String"
+        }
+      }
+    }
+  }
+};
+
+export const SynapseRole: coreHttp.CompositeMapper = {
+  type: {
+    name: "Composite",
+    className: "SynapseRole",
+    modelProperties: {
+      id: {
+        serializedName: "id",
+        type: {
+          name: "String"
+        }
+      },
+      name: {
+        serializedName: "name",
+        type: {
+          name: "String"
+        }
+      },
+      isBuiltIn: {
+        serializedName: "isBuiltIn",
+        required: true,
+        type: {
+          name: "Boolean"
+        }
+      }
+    }
+  }
+};
+
+export const ErrorContract: coreHttp.CompositeMapper = {
+  type: {
+    name: "Composite",
+    className: "ErrorContract",
+    modelProperties: {
+      error: {
+        serializedName: "error",
+        type: {
+          name: "Composite",
+          className: "ErrorResponse"
+        }
+      }
+    }
+  }
+};
+
+export const ErrorResponse: coreHttp.CompositeMapper = {
+  type: {
+    name: "Composite",
+    className: "ErrorResponse",
+    modelProperties: {
+      code: {
+        serializedName: "code",
+        required: true,
+        type: {
+          name: "String"
+        }
+      },
+      message: {
+        serializedName: "message",
+        required: true,
+        type: {
+          name: "String"
+        }
+      },
+      target: {
+        serializedName: "target",
+        type: {
+          name: "String"
+        }
+      },
+      details: {
+        serializedName: "details",
+        type: {
+          name: "Sequence",
+          element: {
+            type: {
+              name: "Composite",
+              className: "ErrorDetail"
+            }
+          }
+        }
+      }
+    }
+  }
+};
+
+export const ErrorDetail: coreHttp.CompositeMapper = {
+  type: {
+    name: "Composite",
+    className: "ErrorDetail",
+    modelProperties: {
+      code: {
+        serializedName: "code",
+        required: true,
+        type: {
+          name: "String"
+        }
+      },
+      message: {
+        serializedName: "message",
+        required: true,
+        type: {
+          name: "String"
+        }
+      },
+      target: {
+        serializedName: "target",
+        type: {
+          name: "String"
+        }
+      }
+    }
+  }
+};
+
+export const RoleAssignmentOptions: coreHttp.CompositeMapper = {
+  type: {
+    name: "Composite",
+    className: "RoleAssignmentOptions",
+    modelProperties: {
+      roleId: {
+        serializedName: "roleId",
+        required: true,
+        type: {
+          name: "String"
+        }
+      },
+      principalId: {
+        serializedName: "principalId",
+        required: true,
+        type: {
+          name: "String"
+        }
+      }
+    }
+  }
+};
+
+export const RoleAssignmentDetails: coreHttp.CompositeMapper = {
+  type: {
+    name: "Composite",
+    className: "RoleAssignmentDetails",
+    modelProperties: {
+      id: {
+        serializedName: "id",
+        type: {
+          name: "String"
+        }
+      },
+      roleId: {
+        serializedName: "roleId",
+        type: {
+          name: "String"
+        }
+      },
+      principalId: {
+        serializedName: "principalId",
+        type: {
+          name: "String"
+        }
+      }
+    }
+  }
+};
+
+export const AccessControlClientGetRoleAssignmentsHeaders: coreHttp.CompositeMapper = {
+  type: {
+    name: "Composite",
+    className: "AccessControlClientGetRoleAssignmentsHeaders",
+    modelProperties: {
+      xMsContinuation: {
+        serializedName: "x-ms-continuation",
+        type: {
+          name: "String"
+        }
+      }
+    }
+  }
+};
diff --git a/sdk/synapse/synapse-access-control/src/models/parameters.ts b/sdk/synapse/synapse-access-control/src/models/parameters.ts
new file mode 100644
index 000000000000..9d0b4b306b5d
--- /dev/null
+++ b/sdk/synapse/synapse-access-control/src/models/parameters.ts
@@ -0,0 +1,128 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import {
+  OperationParameter,
+  OperationURLParameter,
+  OperationQueryParameter
+} from "@azure/core-http";
+import { RoleAssignmentOptions as RoleAssignmentOptionsMapper } from "../models/mappers";
+
+export const accept: OperationParameter = {
+  parameterPath: "accept",
+  mapper: {
+    defaultValue: "application/json",
+    isConstant: true,
+    serializedName: "Accept",
+    type: {
+      name: "String"
+    }
+  }
+};
+
+export const endpoint: OperationURLParameter = {
+  parameterPath: "endpoint",
+  mapper: {
+    serializedName: "endpoint",
+    required: true,
+    type: {
+      name: "String"
+    }
+  },
+  skipEncoding: true
+};
+
+export const apiVersion: OperationQueryParameter = {
+  parameterPath: "apiVersion",
+  mapper: {
+    defaultValue: "2020-02-01-preview",
+    isConstant: true,
+    serializedName: "api-version",
+    type: {
+      name: "String"
+    }
+  }
+};
+
+export const roleId: OperationURLParameter = {
+  parameterPath: "roleId",
+  mapper: {
+    serializedName: "roleId",
+    required: true,
+    type: {
+      name: "String"
+    }
+  }
+};
+
+export const contentType: OperationParameter = {
+  parameterPath: ["options", "contentType"],
+  mapper: {
+    defaultValue: "application/json",
+    isConstant: true,
+    serializedName: "Content-Type",
+    type: {
+      name: "String"
+    }
+  }
+};
+
+export const createRoleAssignmentOptions: OperationParameter = {
+  parameterPath: "createRoleAssignmentOptions",
+  mapper: RoleAssignmentOptionsMapper
+};
+
+export const roleId1: OperationQueryParameter = {
+  parameterPath: ["options", "roleId"],
+  mapper: {
+    serializedName: "roleId",
+    type: {
+      name: "String"
+    }
+  }
+};
+
+export const principalId: OperationQueryParameter = {
+  parameterPath: ["options", "principalId"],
+  mapper: {
+    serializedName: "principalId",
+    type: {
+      name: "String"
+    }
+  }
+};
+
+export const continuationToken: OperationParameter = {
+  parameterPath: ["options", "continuationToken"],
+  mapper: {
+    serializedName: "x-ms-continuation",
+    type: {
+      name: "String"
+    }
+  }
+};
+
+export const roleAssignmentId: OperationURLParameter = {
+  parameterPath: "roleAssignmentId",
+  mapper: {
+    constraints: {
+      MinLength: 1
+    },
+    serializedName: "roleAssignmentId",
+    required: true,
+    type: {
+      name: "String"
+    }
+  }
+};
+
+export const nextLink: OperationURLParameter = {
+  parameterPath: "nextLink",
+  mapper: {
+    serializedName: "nextLink",
"nextLink", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; diff --git a/sdk/synapse/synapse-access-control/src/tracing.ts b/sdk/synapse/synapse-access-control/src/tracing.ts new file mode 100644 index 000000000000..bd5964daaed0 --- /dev/null +++ b/sdk/synapse/synapse-access-control/src/tracing.ts @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createSpanFunction } from "@azure/core-http"; + +export const createSpan = createSpanFunction({ + namespace: "Azure.Synapse.AccessControl", + packagePrefix: "Microsoft.Synapse" +}); diff --git a/sdk/synapse/synapse-access-control/tsconfig.json b/sdk/synapse/synapse-access-control/tsconfig.json new file mode 100644 index 000000000000..d43efedfc9bc --- /dev/null +++ b/sdk/synapse/synapse-access-control/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "es6", + "moduleResolution": "node", + "strict": true, + "target": "es5", + "sourceMap": true, + "declarationMap": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "preserveConstEnums": true, + "lib": ["es6", "dom"], + "declaration": true, + "outDir": "./dist-esm", + "importHelpers": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/sdk/synapse/synapse-artifacts/CHANGELOG.md b/sdk/synapse/synapse-artifacts/CHANGELOG.md new file mode 100644 index 000000000000..106cbca530aa --- /dev/null +++ b/sdk/synapse/synapse-artifacts/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0-beta.1 (2020-12-09) + +- Initial release diff --git a/sdk/synapse/synapse-artifacts/README.md b/sdk/synapse/synapse-artifacts/README.md new file mode 100644 index 000000000000..1301c7c4ceea --- /dev/null +++ b/sdk/synapse/synapse-artifacts/README.md @@ -0,0 +1,61 @@ +## Azure Synapse Artifacts client library for JavaScript + +This package contains an isomorphic SDK for Artifacts. + +## Getting started + +### Install the package + +```bash +npm install @azure/synapse-artifacts +``` + +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +## Key concepts + +## Examples + +```ts +import { ArtifactsClient } from "@azure/synapse-artifacts"; +import { DefaultAzureCredential } from "@azure/identity"; + +export async function main(): Promise { + const credential = new DefaultAzureCredential(); + + let client = new ArtifactsClient(credential, "https://mysynapse.dev.azuresynapse.net"); + let list = await client.pipeline.listPipelinesByWorkspace(); + for await (let item of list) { + console.log("item:", item); + } +} +``` + +## Related projects + +- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +In the future, you'll find additional code samples here. 
+ +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png) diff --git a/sdk/synapse/synapse-artifacts/api-extractor.json b/sdk/synapse/synapse-artifacts/api-extractor.json new file mode 100644 index 000000000000..a5982c5913e1 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/api-extractor.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "mainEntryPointFilePath": "./dist-esm/index.d.ts", + "docModel": { "enabled": true }, + "apiReport": { "enabled": true, "reportFolder": "./review" }, + "dtsRollup": { + "enabled": true, + "untrimmedFilePath": "", + "publicTrimmedFilePath": "./types/synapse-artifacts.d.ts" + }, + "messages": { + "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "extractorMessageReporting": { + "ae-missing-release-tag": { "logLevel": "none" }, + "ae-unresolved-link": { "logLevel": "none" } + } + } +} diff --git a/sdk/synapse/synapse-artifacts/package.json b/sdk/synapse/synapse-artifacts/package.json new file mode 100644 index 000000000000..519273b927e9 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/package.json @@ -0,0 +1,76 @@ +{ + "name": "@azure/synapse-artifacts", + "author": "Microsoft Corporation", + "description": "A generated SDK for ArtifactsClient.", + "sdk-type": "client", + "version": "1.0.0-beta.1", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-artifacts/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "dependencies": { + "@azure/core-lro": "^1.0.2", + "@azure/core-paging": "^1.1.1", + "@azure/core-http": "^1.2.0", + "@opentelemetry/api": "^0.10.2", + "tslib": "^2.0.0" + }, + "keywords": [ + "node", + "azure", + "typescript", + "browser", + "cloud", + "isomorphic" + ], + "license": "MIT", + "engine": { + "node": ">=8.0.0" + }, + "engines": { + "node": ">=8.0.0" + }, + "main": "./dist/index.js", + "module": "./dist-esm/src/index.js", + "types": "./types/synapse-artifacts.d.ts", + "devDependencies": { + "typescript": "~3.9.3", + "eslint": "^6.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "rollup": "^1.16.3", + "rollup-plugin-node-resolve": "^3.4.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "@rollup/plugin-commonjs": "11.0.2", + "uglify-js": "^3.4.9", + "@microsoft/api-extractor": "7.7.11" + }, + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "esm/**/*.js", + "esm/**/*.js.map", + "esm/**/*.d.ts", + "esm/**/*.d.ts.map", + "types/synapse-artifacts.d.ts", + "README.md", + "rollup.config.js", + "tsconfig.json" + ], + "scripts": { + "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", + "pack": "npm pack 2>&1", + "build:test": "echo skip", + "lint": "echo skipped", + "test": "echo skip", + "unit-test:browser": "echo skipped", + "unit-test:node": "echo skipped", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "extract-api": "api-extractor run --local" + }, + 
"sideEffects": false, + "autoPublish": true +} diff --git a/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md new file mode 100644 index 000000000000..5ca1f8d5ad29 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/review/synapse-artifacts.api.md @@ -0,0 +1,7228 @@ +## API Report File for "@azure/synapse-artifacts" + +> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). + +```ts + +import * as coreHttp from '@azure/core-http'; +import { HttpMethods } from '@azure/core-http'; +import { HttpOperationResponse } from '@azure/core-http'; +import { OperationArguments } from '@azure/core-http'; +import { OperationSpec } from '@azure/core-http'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; +import { Poller } from '@azure/core-lro'; +import { PollOperationState } from '@azure/core-lro'; +import { RestResponse } from '@azure/core-http'; + +// @public +export interface Activity { + [property: string]: any; + dependsOn?: ActivityDependency[]; + description?: string; + name: string; + type: "Container" | "Execution" | "Copy" | "HDInsightHive" | "HDInsightPig" | "HDInsightMapReduce" | "HDInsightStreaming" | "HDInsightSpark" | "ExecuteSSISPackage" | "Custom" | "SqlServerStoredProcedure" | "ExecutePipeline" | "Delete" | "AzureDataExplorerCommand" | "Lookup" | "WebActivity" | "GetMetadata" | "IfCondition" | "Switch" | "ForEach" | "AzureMLBatchExecution" | "AzureMLUpdateResource" | "AzureMLExecutePipeline" | "DataLakeAnalyticsU-SQL" | "Wait" | "Until" | "Validation" | "Filter" | "DatabricksNotebook" | "DatabricksSparkJar" | "DatabricksSparkPython" | "SetVariable" | "AppendVariable" | "AzureFunctionActivity" | "WebHook" | "ExecuteDataFlow" | "SynapseNotebook" | "SparkJob" | "SqlPoolStoredProcedure"; + userProperties?: UserProperty[]; +} + +// @public +export interface ActivityDependency { + [property: string]: any; + activity: string; + dependencyConditions: DependencyCondition[]; +} + +// @public +export interface ActivityPolicy { + [property: string]: any; + retry?: any; + retryIntervalInSeconds?: number; + secureInput?: boolean; + secureOutput?: boolean; + timeout?: any; +} + +// @public +export interface ActivityRun { + [property: string]: any; + readonly activityName?: string; + readonly activityRunEnd?: Date; + readonly activityRunId?: string; + readonly activityRunStart?: Date; + readonly activityType?: string; + readonly durationInMs?: number; + readonly error?: any; + readonly input?: any; + readonly linkedServiceName?: string; + readonly output?: any; + readonly pipelineName?: string; + readonly pipelineRunId?: string; + readonly status?: string; +} + +// @public +export interface ActivityRunsQueryResponse { + continuationToken?: string; + value: ActivityRun[]; +} + +// @public (undocumented) +export type ActivityUnion = Activity | ControlActivity | ExecutionActivityUnion | ExecutePipelineActivity | IfConditionActivity | SwitchActivity | ForEachActivity | WaitActivity | UntilActivity | ValidationActivity | FilterActivity | SetVariableActivity | AppendVariableActivity | WebHookActivity | SqlPoolStoredProcedureActivity; + +// @public +export interface AddDataFlowToDebugSessionResponse { + jobVersion?: string; +} + +// @public +export type AmazonMWSLinkedService = LinkedService & { + type: "AmazonMWS"; + endpoint: any; + marketplaceID: any; + sellerID: any; + mwsAuthToken?: SecretBaseUnion; + accessKeyId: any; + secretKey?: SecretBaseUnion; + 
useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type AmazonMWSObjectDataset = Dataset & { + type: "AmazonMWSObject"; + tableName?: any; +}; + +// @public +export type AmazonMWSSource = TabularSource & { + type: "AmazonMWSSource"; + query?: any; +}; + +// @public +export type AmazonRedshiftLinkedService = LinkedService & { + type: "AmazonRedshift"; + server: any; + username?: any; + password?: SecretBaseUnion; + database: any; + port?: any; + encryptedCredential?: any; +}; + +// @public +export type AmazonRedshiftSource = TabularSource & { + type: "AmazonRedshiftSource"; + query?: any; + redshiftUnloadSettings?: RedshiftUnloadSettings; +}; + +// @public +export type AmazonRedshiftTableDataset = Dataset & { + type: "AmazonRedshiftTable"; + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type AmazonS3LinkedService = LinkedService & { + type: "AmazonS3"; + accessKeyId?: any; + secretAccessKey?: SecretBaseUnion; + serviceUrl?: any; + encryptedCredential?: any; +}; + +// @public +export type AmazonS3Location = DatasetLocation & { + type: "AmazonS3Location"; + bucketName?: any; + version?: any; +}; + +// @public +export type AmazonS3ReadSettings = StoreReadSettings & { + type: "AmazonS3ReadSettings"; + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + prefix?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type AppendVariableActivity = Activity & { + type: "AppendVariable"; + variableName?: string; + value?: any; +}; + +// @public +export interface ArtifactRenameRequest { + newName?: string; +} + +// @public (undocumented) +export class ArtifactsClient extends ArtifactsClientContext { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ArtifactsClientOptionalParams); + // (undocumented) + bigDataPools: BigDataPoolsOperation; + // (undocumented) + dataFlow: DataFlowOperation; + // (undocumented) + dataFlowDebugSession: DataFlowDebugSessionOperation; + // (undocumented) + dataset: DatasetOperation; + // (undocumented) + integrationRuntimes: IntegrationRuntimesOperation; + // (undocumented) + linkedService: LinkedServiceOperation; + // (undocumented) + notebook: NotebookOperation; + // (undocumented) + pipeline: PipelineOperation; + // (undocumented) + pipelineRun: PipelineRunOperation; + // (undocumented) + sparkJobDefinition: SparkJobDefinitionOperation; + // (undocumented) + sqlPools: SqlPoolsOperation; + // (undocumented) + sqlScript: SqlScriptOperation; + // (undocumented) + trigger: TriggerOperation; + // (undocumented) + triggerRun: TriggerRunOperation; + // (undocumented) + workspace: WorkspaceOperation; + // (undocumented) + workspaceGitRepoManagement: WorkspaceGitRepoManagementOperation; +} + +// @public (undocumented) +export class ArtifactsClientContext extends coreHttp.ServiceClient { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ArtifactsClientOptionalParams); + // (undocumented) + apiVersion: string; + // (undocumented) + endpoint: string; +} + +// @public +export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { + apiVersion?: string; + endpoint?: string; +} + +// @public +export interface AutoPauseProperties { + delayInMinutes?: number; + enabled?: boolean; +} + +// @public 
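+// Illustrative usage sketch (not part of the generated report): constructing the
+// ArtifactsClient above. Assumes @azure/identity's DefaultAzureCredential, which
+// satisfies the coreHttp.TokenCredential constructor parameter; the workspace
+// endpoint is a hypothetical placeholder. Run inside an async function:
+//
+//   import { DefaultAzureCredential } from "@azure/identity";
+//   import { ArtifactsClient } from "@azure/synapse-artifacts";
+//
+//   const client = new ArtifactsClient(
+//     new DefaultAzureCredential(),
+//     "https://<workspace-name>.dev.azuresynapse.net"
+//   );
+//   // Operation groups hang off the client, e.g. listing Spark pools:
+//   const pools = await client.bigDataPools.list();
+//   console.log(pools.value?.map((pool) => pool.name));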
+export interface AutoScaleProperties { + enabled?: boolean; + maxNodeCount?: number; + minNodeCount?: number; +} + +// @public +export type AvroCompressionCodec = string; + +// @public +export type AvroDataset = Dataset & { + type: "Avro"; + location?: DatasetLocationUnion; + avroCompressionCodec?: AvroCompressionCodec; + avroCompressionLevel?: number; +}; + +// @public +export type AvroFormat = DatasetStorageFormat & { + type: "AvroFormat"; +}; + +// @public +export type AvroSink = CopySink & { + type: "AvroSink"; + storeSettings?: StoreWriteSettingsUnion; + formatSettings?: AvroWriteSettings; +}; + +// @public +export type AvroSource = CopySource & { + type: "AvroSource"; + storeSettings?: StoreReadSettingsUnion; +}; + +// @public +export type AvroWriteSettings = FormatWriteSettings & { + type: "AvroWriteSettings"; + recordName?: string; + recordNamespace?: string; +}; + +// @public +export type AzureBatchLinkedService = LinkedService & { + type: "AzureBatch"; + accountName: any; + accessKey?: SecretBaseUnion; + batchUri: any; + poolName: any; + linkedServiceName: LinkedServiceReference; + encryptedCredential?: any; +}; + +// @public +export type AzureBlobFSLinkedService = LinkedService & { + type: "AzureBlobFS"; + url: any; + accountKey?: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureBlobFSLocation = DatasetLocation & { + type: "AzureBlobFSLocation"; + fileSystem?: any; +}; + +// @public +export type AzureBlobFSReadSettings = StoreReadSettings & { + type: "AzureBlobFSReadSettings"; + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type AzureBlobFSSink = CopySink & { + type: "AzureBlobFSSink"; + copyBehavior?: any; +}; + +// @public +export type AzureBlobFSSource = CopySource & { + type: "AzureBlobFSSource"; + treatEmptyAsNull?: any; + skipHeaderLineCount?: any; + recursive?: any; +}; + +// @public +export type AzureBlobFSWriteSettings = StoreWriteSettings & { + type: "AzureBlobFSWriteSettings"; + blockSizeInMB?: any; +}; + +// @public +export type AzureBlobStorageLinkedService = LinkedService & { + type: "AzureBlobStorage"; + connectionString?: any; + accountKey?: AzureKeyVaultSecretReference; + sasUri?: any; + sasToken?: AzureKeyVaultSecretReference; + serviceEndpoint?: string; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: string; +}; + +// @public +export type AzureBlobStorageLocation = DatasetLocation & { + type: "AzureBlobStorageLocation"; + container?: any; +}; + +// @public +export type AzureBlobStorageReadSettings = StoreReadSettings & { + type: "AzureBlobStorageReadSettings"; + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + prefix?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type AzureBlobStorageWriteSettings = StoreWriteSettings & { + type: "AzureBlobStorageWriteSettings"; + blockSizeInMB?: any; +}; + +// @public +export type AzureDatabricksLinkedService = LinkedService & { + type: "AzureDatabricks"; + domain: any; + accessToken: SecretBaseUnion; + existingClusterId?: any; + instancePoolId?: any; + newClusterVersion?: any; + newClusterNumOfWorker?: any; + newClusterNodeType?: any; + newClusterSparkConf?: { + [propertyName: string]: any; + }; + 
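+// Illustrative note (not part of the generated report): the Activity, LinkedService,
+// and Dataset variants in this file are discriminated unions keyed by the literal
+// `type` tag, so a switch statement narrows them. A sketch over
+// ExecutionActivityUnion (defined later in this report):
+//
+//   function describeActivity(activity: ExecutionActivityUnion): string {
+//     switch (activity.type) {
+//       case "Copy":
+//         // narrowed to CopyActivity here
+//         return `Copy with ${activity.inputs?.length ?? 0} input dataset(s)`;
+//       case "ExecuteDataFlow":
+//         return `Runs data flow ${activity.dataFlow.referenceName}`;
+//       default:
+//         return activity.type;
+//     }
+//   }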
newClusterSparkEnvVars?: { + [propertyName: string]: any; + }; + newClusterCustomTags?: { + [propertyName: string]: any; + }; + newClusterDriverNodeType?: any; + newClusterInitScripts?: any; + newClusterEnableElasticDisk?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureDataExplorerCommandActivity = ExecutionActivity & { + type: "AzureDataExplorerCommand"; + command: any; + commandTimeout?: any; +}; + +// @public +export type AzureDataExplorerLinkedService = LinkedService & { + type: "AzureDataExplorer"; + endpoint: any; + servicePrincipalId: any; + servicePrincipalKey: SecretBaseUnion; + database: any; + tenant: any; +}; + +// @public +export type AzureDataExplorerSink = CopySink & { + type: "AzureDataExplorerSink"; + ingestionMappingName?: any; + ingestionMappingAsJson?: any; + flushImmediately?: any; +}; + +// @public +export type AzureDataExplorerSource = CopySource & { + type: "AzureDataExplorerSource"; + query: any; + noTruncation?: any; + queryTimeout?: any; +}; + +// @public +export type AzureDataExplorerTableDataset = Dataset & { + type: "AzureDataExplorerTable"; + table?: any; +}; + +// @public +export type AzureDataLakeAnalyticsLinkedService = LinkedService & { + type: "AzureDataLakeAnalytics"; + accountName: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant: any; + subscriptionId?: any; + resourceGroupName?: any; + dataLakeAnalyticsUri?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureDataLakeStoreLinkedService = LinkedService & { + type: "AzureDataLakeStore"; + dataLakeStoreUri: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + accountName?: any; + subscriptionId?: any; + resourceGroupName?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureDataLakeStoreLocation = DatasetLocation & { + type: "AzureDataLakeStoreLocation"; +}; + +// @public +export type AzureDataLakeStoreReadSettings = StoreReadSettings & { + type: "AzureDataLakeStoreReadSettings"; + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type AzureDataLakeStoreSink = CopySink & { + type: "AzureDataLakeStoreSink"; + copyBehavior?: any; + enableAdlsSingleFileParallel?: any; +}; + +// @public +export type AzureDataLakeStoreSource = CopySource & { + type: "AzureDataLakeStoreSource"; + recursive?: any; +}; + +// @public +export type AzureDataLakeStoreWriteSettings = StoreWriteSettings & { + type: "AzureDataLakeStoreWriteSettings"; +}; + +// @public +export type AzureEntityResource = Resource & { + readonly etag?: string; +}; + +// @public +export type AzureFileStorageLinkedService = LinkedService & { + type: "AzureFileStorage"; + host: any; + userId?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type AzureFileStorageLocation = DatasetLocation & { + type: "AzureFileStorageLocation"; +}; + +// @public +export type AzureFileStorageReadSettings = StoreReadSettings & { + type: "AzureFileStorageReadSettings"; + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type AzureFunctionActivity = ExecutionActivity & { + type: "AzureFunctionActivity"; + method: AzureFunctionActivityMethod; + functionName: any; + headers?: any; + body?: any; +}; + +// @public +export type 
AzureFunctionActivityMethod = string; + +// @public +export type AzureFunctionLinkedService = LinkedService & { + type: "AzureFunction"; + functionAppUrl: any; + functionKey?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type AzureKeyVaultLinkedService = LinkedService & { + type: "AzureKeyVault"; + baseUrl: any; +}; + +// @public +export type AzureKeyVaultSecretReference = SecretBase & { + type: "AzureKeyVaultSecret"; + store: LinkedServiceReference; + secretName: any; + secretVersion?: any; +}; + +// @public +export type AzureMariaDBLinkedService = LinkedService & { + type: "AzureMariaDB"; + connectionString?: any; + pwd?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type AzureMariaDBSource = TabularSource & { + type: "AzureMariaDBSource"; + query?: any; +}; + +// @public +export type AzureMariaDBTableDataset = Dataset & { + type: "AzureMariaDBTable"; + tableName?: any; +}; + +// @public +export type AzureMLBatchExecutionActivity = ExecutionActivity & { + type: "AzureMLBatchExecution"; + globalParameters?: { + [propertyName: string]: any; + }; + webServiceOutputs?: { + [propertyName: string]: AzureMLWebServiceFile; + }; + webServiceInputs?: { + [propertyName: string]: AzureMLWebServiceFile; + }; +}; + +// @public +export type AzureMLExecutePipelineActivity = ExecutionActivity & { + type: "AzureMLExecutePipeline"; + mlPipelineId: any; + experimentName?: any; + mlPipelineParameters?: any; + mlParentRunId?: any; + continueOnStepFailure?: any; +}; + +// @public +export type AzureMLLinkedService = LinkedService & { + type: "AzureML"; + mlEndpoint: any; + apiKey: SecretBaseUnion; + updateResourceEndpoint?: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureMLServiceLinkedService = LinkedService & { + type: "AzureMLService"; + subscriptionId: any; + resourceGroupName: any; + mlWorkspaceName: any; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureMLUpdateResourceActivity = ExecutionActivity & { + type: "AzureMLUpdateResource"; + trainedModelName: any; + trainedModelLinkedServiceName: LinkedServiceReference; + trainedModelFilePath: any; +}; + +// @public +export interface AzureMLWebServiceFile { + filePath: any; + linkedServiceName: LinkedServiceReference; +} + +// @public +export type AzureMySqlLinkedService = LinkedService & { + type: "AzureMySql"; + connectionString: any; + password?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type AzureMySqlSink = CopySink & { + type: "AzureMySqlSink"; + preCopyScript?: any; +}; + +// @public +export type AzureMySqlSource = TabularSource & { + type: "AzureMySqlSource"; + query?: any; +}; + +// @public +export type AzureMySqlTableDataset = Dataset & { + type: "AzureMySqlTable"; + tableName?: any; + table?: any; +}; + +// @public +export type AzurePostgreSqlLinkedService = LinkedService & { + type: "AzurePostgreSql"; + connectionString?: any; + password?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type AzurePostgreSqlSink = CopySink & { + type: "AzurePostgreSqlSink"; + preCopyScript?: any; +}; + +// @public +export type AzurePostgreSqlSource = TabularSource & { + type: "AzurePostgreSqlSource"; + query?: any; +}; + +// @public +export type AzurePostgreSqlTableDataset = Dataset & { + type: "AzurePostgreSqlTable"; + 
tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type AzureQueueSink = CopySink & { + type: "AzureQueueSink"; +}; + +// @public +export type AzureSearchIndexDataset = Dataset & { + type: "AzureSearchIndex"; + indexName: any; +}; + +// @public +export type AzureSearchIndexSink = CopySink & { + type: "AzureSearchIndexSink"; + writeBehavior?: AzureSearchIndexWriteBehaviorType; +}; + +// @public +export type AzureSearchIndexWriteBehaviorType = string; + +// @public +export type AzureSearchLinkedService = LinkedService & { + type: "AzureSearch"; + url: any; + key?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type AzureSqlDatabaseLinkedService = LinkedService & { + type: "AzureSqlDatabase"; + connectionString: any; + password?: AzureKeyVaultSecretReference; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureSqlDWLinkedService = LinkedService & { + type: "AzureSqlDW"; + connectionString: any; + password?: AzureKeyVaultSecretReference; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureSqlDWTableDataset = Dataset & { + type: "AzureSqlDWTable"; + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type AzureSqlMILinkedService = LinkedService & { + type: "AzureSqlMI"; + connectionString: any; + password?: AzureKeyVaultSecretReference; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + encryptedCredential?: any; +}; + +// @public +export type AzureSqlMITableDataset = Dataset & { + type: "AzureSqlMITable"; + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type AzureSqlSink = CopySink & { + type: "AzureSqlSink"; + sqlWriterStoredProcedureName?: any; + sqlWriterTableType?: any; + preCopyScript?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + storedProcedureTableTypeParameterName?: any; + tableOption?: any; +}; + +// @public +export type AzureSqlSource = TabularSource & { + type: "AzureSqlSource"; + sqlReaderQuery?: any; + sqlReaderStoredProcedureName?: any; + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + produceAdditionalTypes?: any; +}; + +// @public +export type AzureSqlTableDataset = Dataset & { + type: "AzureSqlTable"; + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type AzureStorageLinkedService = LinkedService & { + type: "AzureStorage"; + connectionString?: any; + accountKey?: AzureKeyVaultSecretReference; + sasUri?: any; + sasToken?: AzureKeyVaultSecretReference; + encryptedCredential?: string; +}; + +// @public +export type AzureTableDataset = Dataset & { + type: "AzureTable"; + tableName: any; +}; + +// @public +export type AzureTableSink = CopySink & { + type: "AzureTableSink"; + azureTableDefaultPartitionKeyValue?: any; + azureTablePartitionKeyName?: any; + azureTableRowKeyName?: any; + azureTableInsertType?: any; +}; + +// @public +export type AzureTableSource = TabularSource & { + type: "AzureTableSource"; + azureTableSourceQuery?: any; + azureTableSourceIgnoreTableNotFound?: any; +}; + +// @public +export type AzureTableStorageLinkedService = LinkedService & { + type: "AzureTableStorage"; + connectionString?: any; + accountKey?: AzureKeyVaultSecretReference; + 
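+// Illustrative note (not part of the generated report): secret-bearing fields such
+// as the accountKey/sasToken above take an AzureKeyVaultSecretReference rather than
+// an inline value. A sketch, assuming LinkedServiceReference's { referenceName, type }
+// shape from the full report; the linked-service name is hypothetical:
+//
+//   const sasToken: AzureKeyVaultSecretReference = {
+//     type: "AzureKeyVaultSecret",
+//     store: { referenceName: "MyKeyVaultLinkedService", type: "LinkedServiceReference" },
+//     secretName: "storage-sas-token"
+//   };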
sasUri?: any; + sasToken?: AzureKeyVaultSecretReference; + encryptedCredential?: string; +}; + +// @public (undocumented) +export interface BaseResult extends RestResponse { + _response: LROOperationResponse; +} + +// @public +export interface BigDataPoolReference { + referenceName: string; + type: BigDataPoolReferenceType; +} + +// @public +export type BigDataPoolReferenceType = string; + +// @public +export type BigDataPoolResourceInfo = TrackedResource & { + provisioningState?: string; + autoScale?: AutoScaleProperties; + creationDate?: Date; + autoPause?: AutoPauseProperties; + isComputeIsolationEnabled?: boolean; + haveLibraryRequirementsChanged?: boolean; + sessionLevelPackagesEnabled?: boolean; + sparkEventsFolder?: string; + nodeCount?: number; + libraryRequirements?: LibraryRequirements; + sparkConfigProperties?: LibraryRequirements; + sparkVersion?: string; + defaultSparkLogFolder?: string; + nodeSize?: NodeSize; + nodeSizeFamily?: NodeSizeFamily; +}; + +// @public +export interface BigDataPoolResourceInfoListResult { + nextLink?: string; + value?: BigDataPoolResourceInfo[]; +} + +// @public +export type BigDataPoolsGetResponse = BigDataPoolResourceInfo & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: BigDataPoolResourceInfo; + }; +}; + +// @public +export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: BigDataPoolResourceInfoListResult; + }; +}; + +// @public +export class BigDataPoolsOperation { + constructor(client: ArtifactsClient); + get(bigDataPoolName: string, options?: coreHttp.OperationOptions): Promise; + list(options?: coreHttp.OperationOptions): Promise; +} + +// @public +export type BinaryDataset = Dataset & { + type: "Binary"; + location?: DatasetLocationUnion; + compression?: DatasetCompressionUnion; +}; + +// @public +export type BinarySink = CopySink & { + type: "BinarySink"; + storeSettings?: StoreWriteSettingsUnion; +}; + +// @public +export type BinarySource = CopySource & { + type: "BinarySource"; + storeSettings?: StoreReadSettingsUnion; +}; + +// @public +export type BlobEventsTrigger = MultiplePipelineTrigger & { + type: "BlobEventsTrigger"; + blobPathBeginsWith?: string; + blobPathEndsWith?: string; + ignoreEmptyBlobs?: boolean; + events: BlobEventType[]; + scope: string; +}; + +// @public +export type BlobEventType = string; + +// @public +export type BlobSink = CopySink & { + type: "BlobSink"; + blobWriterOverwriteFiles?: any; + blobWriterDateTimeFormat?: any; + blobWriterAddHeader?: any; + copyBehavior?: any; +}; + +// @public +export type BlobSource = CopySource & { + type: "BlobSource"; + treatEmptyAsNull?: any; + skipHeaderLineCount?: any; + recursive?: any; +}; + +// @public +export type BlobTrigger = MultiplePipelineTrigger & { + type: "BlobTrigger"; + folderPath: string; + maxConcurrency: number; + linkedService: LinkedServiceReference; +}; + +// @public +export type CassandraLinkedService = LinkedService & { + type: "Cassandra"; + host: any; + authenticationType?: any; + port?: any; + username?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type CassandraSource = TabularSource & { + type: "CassandraSource"; + query?: any; + consistencyLevel?: CassandraSourceReadConsistencyLevels; +}; + +// @public +export type CassandraSourceReadConsistencyLevels = string; + +// @public +export type CassandraTableDataset = Dataset & { + type: "CassandraTable"; + tableName?: any; + keyspace?: 
any; +}; + +// @public +export type CellOutputType = string; + +// @public +export type ChainingTrigger = Trigger & { + type: "ChainingTrigger"; + pipeline: TriggerPipelineReference; + dependsOn: PipelineReference[]; + runDimension: string; +}; + +// @public +export interface CloudError { + code: string; + details?: CloudError[]; + message: string; + target?: string; +} + +// @public +export type CommonDataServiceForAppsEntityDataset = Dataset & { + type: "CommonDataServiceForAppsEntity"; + entityName?: any; +}; + +// @public +export type CommonDataServiceForAppsLinkedService = LinkedService & { + type: "CommonDataServiceForApps"; + deploymentType: DynamicsDeploymentType; + hostName?: any; + port?: any; + serviceUri?: any; + organizationName?: any; + authenticationType: DynamicsAuthenticationType; + username?: any; + password?: SecretBaseUnion; + servicePrincipalId?: any; + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + servicePrincipalCredential?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type CommonDataServiceForAppsSink = CopySink & { + type: "CommonDataServiceForAppsSink"; + writeBehavior: DynamicsSinkWriteBehavior; + ignoreNullValues?: any; + alternateKeyName?: any; +}; + +// @public +export type CommonDataServiceForAppsSource = CopySource & { + type: "CommonDataServiceForAppsSource"; + query?: any; +}; + +// @public +export type ConcurLinkedService = LinkedService & { + type: "Concur"; + clientId: any; + username: any; + password?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type ConcurObjectDataset = Dataset & { + type: "ConcurObject"; + tableName?: any; +}; + +// @public +export type ConcurSource = TabularSource & { + type: "ConcurSource"; + query?: any; +}; + +// @public +export type ControlActivity = Activity & { + type: "Container"; +}; + +// @public +export type CopyActivity = ExecutionActivity & { + type: "Copy"; + inputs?: DatasetReference[]; + outputs?: DatasetReference[]; + source: CopySourceUnion; + sink: CopySinkUnion; + translator?: any; + enableStaging?: any; + stagingSettings?: StagingSettings; + parallelCopies?: any; + dataIntegrationUnits?: any; + enableSkipIncompatibleRow?: any; + redirectIncompatibleRowSettings?: RedirectIncompatibleRowSettings; + preserveRules?: any[]; + preserve?: any[]; +}; + +// @public +export type CopyBehaviorType = string; + +// @public +export interface CopySink { + [property: string]: any; + maxConcurrentConnections?: any; + sinkRetryCount?: any; + sinkRetryWait?: any; + type: "DelimitedTextSink" | "JsonSink" | "OrcSink" | "AzurePostgreSqlSink" | "AzureMySqlSink" | "SapCloudForCustomerSink" | "AzureQueueSink" | "AzureTableSink" | "AvroSink" | "ParquetSink" | "BinarySink" | "BlobSink" | "FileSystemSink" | "DocumentDbCollectionSink" | "CosmosDbSqlApiSink" | "SqlSink" | "SqlServerSink" | "AzureSqlSink" | "SqlMISink" | "SqlDWSink" | "OracleSink" | "AzureDataLakeStoreSink" | "AzureBlobFSSink" | "AzureSearchIndexSink" | "OdbcSink" | "InformixSink" | "MicrosoftAccessSink" | "DynamicsSink" | "DynamicsCrmSink" | "CommonDataServiceForAppsSink" | "AzureDataExplorerSink" | "SalesforceSink" | "SalesforceServiceCloudSink" | "CosmosDbMongoDbApiSink"; + writeBatchSize?: any; + writeBatchTimeout?: any; +} + +// @public (undocumented) +export type CopySinkUnion = CopySink | DelimitedTextSink | JsonSink | OrcSink | AzurePostgreSqlSink | AzureMySqlSink | SapCloudForCustomerSink | 
AzureQueueSink | AzureTableSink | AvroSink | ParquetSink | BinarySink | BlobSink | FileSystemSink | DocumentDbCollectionSink | CosmosDbSqlApiSink | SqlSink | SqlServerSink | AzureSqlSink | SqlMISink | SqlDWSink | OracleSink | AzureDataLakeStoreSink | AzureBlobFSSink | AzureSearchIndexSink | OdbcSink | InformixSink | MicrosoftAccessSink | DynamicsSink | DynamicsCrmSink | CommonDataServiceForAppsSink | AzureDataExplorerSink | SalesforceSink | SalesforceServiceCloudSink | CosmosDbMongoDbApiSink; + +// @public +export interface CopySource { + [property: string]: any; + maxConcurrentConnections?: any; + sourceRetryCount?: any; + sourceRetryWait?: any; + type: "AvroSource" | "ParquetSource" | "DelimitedTextSource" | "JsonSource" | "OrcSource" | "BinarySource" | "TabularSource" | "AzureTableSource" | "BlobSource" | "DocumentDbCollectionSource" | "CosmosDbSqlApiSource" | "DynamicsSource" | "DynamicsCrmSource" | "CommonDataServiceForAppsSource" | "RelationalSource" | "InformixSource" | "MicrosoftAccessSource" | "Db2Source" | "OdbcSource" | "MySqlSource" | "PostgreSqlSource" | "SybaseSource" | "SapBwSource" | "ODataSource" | "SalesforceSource" | "SalesforceServiceCloudSource" | "SapCloudForCustomerSource" | "SapEccSource" | "SapHanaSource" | "SapOpenHubSource" | "SapTableSource" | "RestSource" | "SqlSource" | "SqlServerSource" | "AzureSqlSource" | "SqlMISource" | "SqlDWSource" | "FileSystemSource" | "HdfsSource" | "AzureMySqlSource" | "AzureDataExplorerSource" | "OracleSource" | "TeradataSource" | "WebSource" | "CassandraSource" | "MongoDbSource" | "MongoDbV2Source" | "CosmosDbMongoDbApiSource" | "Office365Source" | "AzureDataLakeStoreSource" | "AzureBlobFSSource" | "HttpSource" | "AmazonMWSSource" | "AzurePostgreSqlSource" | "ConcurSource" | "CouchbaseSource" | "DrillSource" | "EloquaSource" | "GoogleBigQuerySource" | "GreenplumSource" | "HBaseSource" | "HiveSource" | "HubspotSource" | "ImpalaSource" | "JiraSource" | "MagentoSource" | "MariaDBSource" | "AzureMariaDBSource" | "MarketoSource" | "PaypalSource" | "PhoenixSource" | "PrestoSource" | "QuickBooksSource" | "ServiceNowSource" | "ShopifySource" | "SparkSource" | "SquareSource" | "XeroSource" | "ZohoSource" | "NetezzaSource" | "VerticaSource" | "SalesforceMarketingCloudSource" | "ResponsysSource" | "DynamicsAXSource" | "OracleServiceCloudSource" | "GoogleAdWordsSource" | "AmazonRedshiftSource"; +} + +// @public (undocumented) +export type CopySourceUnion = CopySource | AvroSource | ParquetSource | DelimitedTextSource | JsonSource | OrcSource | BinarySource | TabularSourceUnion | BlobSource | DocumentDbCollectionSource | CosmosDbSqlApiSource | DynamicsSource | DynamicsCrmSource | CommonDataServiceForAppsSource | RelationalSource | MicrosoftAccessSource | ODataSource | SalesforceServiceCloudSource | RestSource | FileSystemSource | HdfsSource | AzureDataExplorerSource | OracleSource | WebSource | MongoDbSource | MongoDbV2Source | CosmosDbMongoDbApiSource | Office365Source | AzureDataLakeStoreSource | AzureBlobFSSource | HttpSource; + +// @public +export interface CopyTranslator { + [property: string]: any; + type: "TabularTranslator"; +} + +// @public (undocumented) +export type CopyTranslatorUnion = CopyTranslator | TabularTranslator; + +// @public +export type CosmosDbLinkedService = LinkedService & { + type: "CosmosDb"; + connectionString?: any; + accountEndpoint?: any; + database?: any; + accountKey?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type CosmosDbMongoDbApiCollectionDataset = Dataset & { + type: 
"CosmosDbMongoDbApiCollection"; + collection: any; +}; + +// @public +export type CosmosDbMongoDbApiLinkedService = LinkedService & { + type: "CosmosDbMongoDbApi"; + connectionString: any; + database: any; +}; + +// @public +export type CosmosDbMongoDbApiSink = CopySink & { + type: "CosmosDbMongoDbApiSink"; + writeBehavior?: any; +}; + +// @public +export type CosmosDbMongoDbApiSource = CopySource & { + type: "CosmosDbMongoDbApiSource"; + filter?: any; + cursorMethods?: MongoDbCursorMethodsProperties; + batchSize?: any; + queryTimeout?: any; +}; + +// @public +export type CosmosDbSqlApiCollectionDataset = Dataset & { + type: "CosmosDbSqlApiCollection"; + collectionName: any; +}; + +// @public +export type CosmosDbSqlApiSink = CopySink & { + type: "CosmosDbSqlApiSink"; + writeBehavior?: any; +}; + +// @public +export type CosmosDbSqlApiSource = CopySource & { + type: "CosmosDbSqlApiSource"; + query?: any; + pageSize?: any; + preferredRegions?: any; +}; + +// @public +export type CouchbaseLinkedService = LinkedService & { + type: "Couchbase"; + connectionString?: any; + credString?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type CouchbaseSource = TabularSource & { + type: "CouchbaseSource"; + query?: any; +}; + +// @public +export type CouchbaseTableDataset = Dataset & { + type: "CouchbaseTable"; + tableName?: any; +}; + +// @public +export interface CreateDataFlowDebugSessionRequest { + clusterTimeout?: number; + dataBricksLinkedService?: LinkedServiceResource; + dataFlowName?: string; + existingClusterId?: string; + newClusterName?: string; + newClusterNodeType?: string; +} + +// @public +export interface CreateDataFlowDebugSessionResponse { + sessionId?: string; +} + +// @public +export interface CreateRunResponse { + runId: string; +} + +// @public +export type CustomActivity = ExecutionActivity & { + type: "Custom"; + command: any; + resourceLinkedService?: LinkedServiceReference; + folderPath?: any; + referenceObjects?: CustomActivityReferenceObject; + extendedProperties?: { + [propertyName: string]: any; + }; + retentionTimeInDays?: any; +}; + +// @public +export interface CustomActivityReferenceObject { + datasets?: DatasetReference[]; + linkedServices?: LinkedServiceReference[]; +} + +// @public +export type CustomDataset = Dataset & { + type: "CustomDataset"; + typeProperties?: any; +}; + +// @public +export type CustomDataSourceLinkedService = LinkedService & { + type: "CustomDataSource"; + typeProperties: any; +}; + +// @public +export interface CustomerManagedKeyDetails { + key?: WorkspaceKeyDetails; + readonly status?: string; +} + +// @public +export interface CustomSetupBase { + type: "undefined"; +} + +// @public +export type DatabricksNotebookActivity = ExecutionActivity & { + type: "DatabricksNotebook"; + notebookPath: any; + baseParameters?: { + [propertyName: string]: any; + }; + libraries?: { + [propertyName: string]: any; + }[]; +}; + +// @public +export type DatabricksSparkJarActivity = ExecutionActivity & { + type: "DatabricksSparkJar"; + mainClassName: any; + parameters?: any[]; + libraries?: { + [propertyName: string]: any; + }[]; +}; + +// @public +export type DatabricksSparkPythonActivity = ExecutionActivity & { + type: "DatabricksSparkPython"; + pythonFile: any; + parameters?: any[]; + libraries?: { + [propertyName: string]: any; + }[]; +}; + +// @public +export interface DataFlow { + annotations?: any[]; + description?: string; + folder?: DataFlowFolder; + type: "MappingDataFlow"; +} + +// @public +export type 
DataFlowComputeType = string; + +// @public +export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowResource; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface DataFlowDebugCommandRequest { + commandName?: string; + commandPayload: any; + dataFlowName?: string; + sessionId: string; +} + +// @public +export interface DataFlowDebugCommandResponse { + data?: string; + status?: string; +} + +// @public +export interface DataFlowDebugPackage { + [property: string]: any; + dataFlow?: DataFlowDebugResource; + datasets?: DatasetDebugResource[]; + debugSettings?: DataFlowDebugPackageDebugSettings; + linkedServices?: LinkedServiceDebugResource[]; + sessionId?: string; + staging?: DataFlowStagingInfo; +} + +// @public +export interface DataFlowDebugPackageDebugSettings { + datasetParameters?: any; + parameters?: { + [propertyName: string]: any; + }; + sourceSettings?: DataFlowSourceSetting[]; +} + +// @public +export interface DataFlowDebugPreviewDataRequest { + dataFlowName?: string; + rowLimits?: number; + sessionId?: string; + streamName?: string; +} + +// @public +export interface DataFlowDebugQueryResponse { + runId?: string; +} + +// @public +export type DataFlowDebugResource = SubResourceDebugResource & { + properties: DataFlowUnion; +}; + +// @public +export interface DataFlowDebugResultResponse { + data?: string; + status?: string; +} + +// @public +export type DataFlowDebugSessionAddDataFlowResponse = AddDataFlowToDebugSessionResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: AddDataFlowToDebugSessionResponse; + }; +}; + +// @public +export interface DataFlowDebugSessionCreateDataFlowDebugSessionHeaders { + location?: string; +} + +// @public +export type DataFlowDebugSessionCreateDataFlowDebugSessionResponse = CreateDataFlowDebugSessionResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: CreateDataFlowDebugSessionResponse; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface DataFlowDebugSessionExecuteCommandHeaders { + location?: string; +} + +// @public +export type DataFlowDebugSessionExecuteCommandResponse = DataFlowDebugCommandResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowDebugCommandResponse; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface DataFlowDebugSessionInfo { + [property: string]: any; + computeType?: string; + coreCount?: number; + dataFlowName?: string; + integrationRuntimeName?: string; + lastActivityTime?: string; + nodeCount?: number; + sessionId?: string; + startTime?: string; + timeToLiveInMinutes?: number; +} + +// @public +export class DataFlowDebugSessionOperation { + constructor(client: ArtifactsClient); + addDataFlow(request: DataFlowDebugPackage, options?: coreHttp.OperationOptions): Promise; + createDataFlowDebugSession(request: CreateDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions): Promise>; + deleteDataFlowDebugSession(request: DeleteDataFlowDebugSessionRequest, options?: coreHttp.OperationOptions): Promise; + executeCommand(request: DataFlowDebugCommandRequest, options?: coreHttp.OperationOptions): Promise>; + listQueryDataFlowDebugSessionsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + } + +// 
@public +export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: QueryDataFlowDebugSessionsResponse; + }; +}; + +// @public +export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse = QueryDataFlowDebugSessionsResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: QueryDataFlowDebugSessionsResponse; + }; +}; + +// @public +export interface DataFlowDebugStatisticsRequest { + columns?: string[]; + dataFlowName?: string; + sessionId?: string; + streamName?: string; +} + +// @public +export interface DataFlowFolder { + name?: string; +} + +// @public +export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type DataFlowGetDataFlowResponse = DataFlowResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowResource; + }; +}; + +// @public +export type DataFlowGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowListResponse; + }; +}; + +// @public +export type DataFlowGetDataFlowsByWorkspaceResponse = DataFlowListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DataFlowListResponse; + }; +}; + +// @public +export interface DataFlowListResponse { + nextLink?: string; + value: DataFlowResource[]; +} + +// @public +export class DataFlowOperation { + constructor(client: ArtifactsClient); + createOrUpdateDataFlow(dataFlowName: string, dataFlow: DataFlowResource, options?: DataFlowCreateOrUpdateDataFlowOptionalParams): Promise>; + deleteDataFlow(dataFlowName: string, options?: coreHttp.OperationOptions): Promise>; + getDataFlow(dataFlowName: string, options?: DataFlowGetDataFlowOptionalParams): Promise; + listDataFlowsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + renameDataFlow(dataFlowName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise>; +} + +// @public +export interface DataFlowReference { + [property: string]: any; + datasetParameters?: any; + referenceName: string; + type: DataFlowReferenceType; +} + +// @public +export type DataFlowReferenceType = string; + +// @public +export type DataFlowResource = AzureEntityResource & { + properties: DataFlowUnion; +}; + +// @public +export type DataFlowSink = Transformation & { + dataset?: DatasetReference; +}; + +// @public +export type DataFlowSource = Transformation & { + dataset?: DatasetReference; +}; + +// @public +export interface DataFlowSourceSetting { + [property: string]: any; + rowLimit?: number; + sourceName?: string; +} + +// @public +export interface DataFlowStagingInfo { + folderPath?: string; + linkedService?: LinkedServiceReference; +} + +// @public (undocumented) +export type DataFlowUnion = DataFlow | MappingDataFlow; + +// @public +export type DataLakeAnalyticsUsqlActivity = ExecutionActivity & { + type: "DataLakeAnalyticsU-SQL"; + scriptPath: any; + scriptLinkedService: LinkedServiceReference; + degreeOfParallelism?: any; + priority?: any; + parameters?: { + [propertyName: string]: any; + }; + runtimeVersion?: any; + compilationMode?: any; +}; + +// @public +export interface DataLakeStorageAccountDetails { + accountUrl?: string; + filesystem?: string; +} + +// @public +export interface Dataset { + [property: string]: any; + 
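+// Illustrative note (not part of the generated report): the createOrUpdate
+// operations above (e.g. DataFlowOperation.createOrUpdateDataFlow) are long-running
+// and resolve to a @azure/core-lro poller, so a typical call site awaits completion.
+// `resource` here is a hypothetical DataFlowResource:
+//
+//   const poller = await client.dataFlow.createOrUpdateDataFlow("myDataFlow", resource);
+//   const created = await poller.pollUntilDone();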
annotations?: any[]; + description?: string; + folder?: DatasetFolder; + linkedServiceName: LinkedServiceReference; + parameters?: { + [propertyName: string]: ParameterSpecification; + }; + schema?: any; + structure?: any; + type: "Avro" | "Parquet" | "DelimitedText" | "Json" | "Orc" | "Binary" | "AzureTable" | "AzureSqlTable" | "AzureSqlMITable" | "AzureSqlDWTable" | "CassandraTable" | "CustomDataset" | "CosmosDbSqlApiCollection" | "DocumentDbCollection" | "DynamicsEntity" | "DynamicsCrmEntity" | "CommonDataServiceForAppsEntity" | "Office365Table" | "MongoDbCollection" | "MongoDbV2Collection" | "CosmosDbMongoDbApiCollection" | "ODataResource" | "OracleTable" | "TeradataTable" | "AzureMySqlTable" | "AmazonRedshiftTable" | "Db2Table" | "RelationalTable" | "InformixTable" | "OdbcTable" | "MySqlTable" | "PostgreSqlTable" | "MicrosoftAccessTable" | "SalesforceObject" | "SalesforceServiceCloudObject" | "SybaseTable" | "SapBwCube" | "SapCloudForCustomerResource" | "SapEccResource" | "SapHanaTable" | "SapOpenHubTable" | "SqlServerTable" | "RestResource" | "SapTableResource" | "WebTable" | "AzureSearchIndex" | "AmazonMWSObject" | "AzurePostgreSqlTable" | "ConcurObject" | "CouchbaseTable" | "DrillTable" | "EloquaObject" | "GoogleBigQueryObject" | "GreenplumTable" | "HBaseObject" | "HiveObject" | "HubspotObject" | "ImpalaObject" | "JiraObject" | "MagentoObject" | "MariaDBTable" | "AzureMariaDBTable" | "MarketoObject" | "PaypalObject" | "PhoenixObject" | "PrestoObject" | "QuickBooksObject" | "ServiceNowObject" | "ShopifyObject" | "SparkObject" | "SquareObject" | "XeroObject" | "ZohoObject" | "NetezzaTable" | "VerticaTable" | "SalesforceMarketingCloudObject" | "ResponsysObject" | "DynamicsAXResource" | "OracleServiceCloudObject" | "AzureDataExplorerTable" | "GoogleAdWordsObject"; +} + +// @public +export type DatasetBZip2Compression = DatasetCompression & { + type: "BZip2"; +}; + +// @public +export interface DatasetCompression { + [property: string]: any; + type: "BZip2" | "GZip" | "Deflate" | "ZipDeflate"; +} + +// @public +export type DatasetCompressionLevel = string; + +// @public (undocumented) +export type DatasetCompressionUnion = DatasetCompression | DatasetBZip2Compression | DatasetGZipCompression | DatasetDeflateCompression | DatasetZipDeflateCompression; + +// @public +export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions { + ifMatch?: string; +} + +// @public +export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DatasetResource; + [LROSYM]: LROResponseInfo; + }; +}; + +// @public +export interface DatasetDataElement { + name?: any; + type?: any; +} + +// @public +export type DatasetDebugResource = SubResourceDebugResource & { + properties: DatasetUnion; +}; + +// @public +export type DatasetDeflateCompression = DatasetCompression & { + type: "Deflate"; + level?: DatasetCompressionLevel; +}; + +// @public +export interface DatasetFolder { + name?: string; +} + +// @public +export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions { + ifNoneMatch?: string; +} + +// @public +export type DatasetGetDatasetResponse = DatasetResource & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DatasetResource; + }; +}; + +// @public +export type DatasetGetDatasetsByWorkspaceNextResponse = DatasetListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DatasetListResponse; + }; +}; + +// 
@public +export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: DatasetListResponse; + }; +}; + +// @public +export type DatasetGZipCompression = DatasetCompression & { + type: "GZip"; + level?: DatasetCompressionLevel; +}; + +// @public +export interface DatasetListResponse { + nextLink?: string; + value: DatasetResource[]; +} + +// @public +export interface DatasetLocation { + [property: string]: any; + fileName?: any; + folderPath?: any; + type: "AzureBlobStorageLocation" | "AzureBlobFSLocation" | "AzureDataLakeStoreLocation" | "AmazonS3Location" | "FileServerLocation" | "AzureFileStorageLocation" | "GoogleCloudStorageLocation" | "FtpServerLocation" | "SftpLocation" | "HttpServerLocation" | "HdfsLocation"; +} + +// @public (undocumented) +export type DatasetLocationUnion = DatasetLocation | AzureBlobStorageLocation | AzureBlobFSLocation | AzureDataLakeStoreLocation | AmazonS3Location | FileServerLocation | AzureFileStorageLocation | GoogleCloudStorageLocation | FtpServerLocation | SftpLocation | HttpServerLocation | HdfsLocation; + +// @public +export class DatasetOperation { + constructor(client: ArtifactsClient); + createOrUpdateDataset(datasetName: string, dataset: DatasetResource, options?: DatasetCreateOrUpdateDatasetOptionalParams): Promise>; + deleteDataset(datasetName: string, options?: coreHttp.OperationOptions): Promise>; + getDataset(datasetName: string, options?: DatasetGetDatasetOptionalParams): Promise; + listDatasetsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator; + renameDataset(datasetName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise>; +} + +// @public +export interface DatasetReference { + parameters?: { + [propertyName: string]: any; + }; + referenceName: string; + type: DatasetReferenceType; +} + +// @public +export type DatasetReferenceType = string; + +// @public +export type DatasetResource = AzureEntityResource & { + properties: DatasetUnion; +}; + +// @public +export interface DatasetSchemaDataElement { + [property: string]: any; + name?: any; + type?: any; +} + +// @public +export interface DatasetStorageFormat { + [property: string]: any; + deserializer?: any; + serializer?: any; + type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat"; +} + +// @public (undocumented) +export type DatasetStorageFormatUnion = DatasetStorageFormat | TextFormat | JsonFormat | AvroFormat | OrcFormat | ParquetFormat; + +// @public (undocumented) +export type DatasetUnion = Dataset | AvroDataset | ParquetDataset | DelimitedTextDataset | JsonDataset | OrcDataset | BinaryDataset | AzureTableDataset | AzureSqlTableDataset | AzureSqlMITableDataset | AzureSqlDWTableDataset | CassandraTableDataset | CustomDataset | CosmosDbSqlApiCollectionDataset | DocumentDbCollectionDataset | DynamicsEntityDataset | DynamicsCrmEntityDataset | CommonDataServiceForAppsEntityDataset | Office365Dataset | MongoDbCollectionDataset | MongoDbV2CollectionDataset | CosmosDbMongoDbApiCollectionDataset | ODataResourceDataset | OracleTableDataset | TeradataTableDataset | AzureMySqlTableDataset | AmazonRedshiftTableDataset | Db2TableDataset | RelationalTableDataset | InformixTableDataset | OdbcTableDataset | MySqlTableDataset | PostgreSqlTableDataset | MicrosoftAccessTableDataset | SalesforceObjectDataset | SalesforceServiceCloudObjectDataset | SybaseTableDataset | SapBwCubeDataset | SapCloudForCustomerResourceDataset | 
SapEccResourceDataset | SapHanaTableDataset | SapOpenHubTableDataset | SqlServerTableDataset | RestResourceDataset | SapTableResourceDataset | WebTableDataset | AzureSearchIndexDataset | AmazonMWSObjectDataset | AzurePostgreSqlTableDataset | ConcurObjectDataset | CouchbaseTableDataset | DrillTableDataset | EloquaObjectDataset | GoogleBigQueryObjectDataset | GreenplumTableDataset | HBaseObjectDataset | HiveObjectDataset | HubspotObjectDataset | ImpalaObjectDataset | JiraObjectDataset | MagentoObjectDataset | MariaDBTableDataset | AzureMariaDBTableDataset | MarketoObjectDataset | PaypalObjectDataset | PhoenixObjectDataset | PrestoObjectDataset | QuickBooksObjectDataset | ServiceNowObjectDataset | ShopifyObjectDataset | SparkObjectDataset | SquareObjectDataset | XeroObjectDataset | ZohoObjectDataset | NetezzaTableDataset | VerticaTableDataset | SalesforceMarketingCloudObjectDataset | ResponsysObjectDataset | DynamicsAXResourceDataset | OracleServiceCloudObjectDataset | AzureDataExplorerTableDataset | GoogleAdWordsObjectDataset; + +// @public +export type DatasetZipDeflateCompression = DatasetCompression & { + type: "ZipDeflate"; + level?: DatasetCompressionLevel; +}; + +// @public +export type DayOfWeek = "Sunday" | "Monday" | "Tuesday" | "Wednesday" | "Thursday" | "Friday" | "Saturday"; + +// @public +export type Db2AuthenticationType = string; + +// @public +export type Db2LinkedService = LinkedService & { + type: "Db2"; + server: any; + database: any; + authenticationType?: Db2AuthenticationType; + username?: any; + password?: SecretBaseUnion; + packageCollection?: any; + certificateCommonName?: any; + encryptedCredential?: any; +}; + +// @public +export type Db2Source = TabularSource & { + type: "Db2Source"; + query?: any; +}; + +// @public +export type Db2TableDataset = Dataset & { + type: "Db2Table"; + tableName?: any; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type DeleteActivity = ExecutionActivity & { + type: "Delete"; + recursive?: any; + maxConcurrentConnections?: number; + enableLogging?: any; + logStorageSettings?: LogStorageSettings; + dataset: DatasetReference; +}; + +// @public +export interface DeleteDataFlowDebugSessionRequest { + dataFlowName?: string; + sessionId?: string; +} + +// @public +export type DelimitedTextCompressionCodec = string; + +// @public +export type DelimitedTextDataset = Dataset & { + type: "DelimitedText"; + location?: DatasetLocationUnion; + columnDelimiter?: any; + rowDelimiter?: any; + encodingName?: any; + compressionCodec?: DelimitedTextCompressionCodec; + compressionLevel?: DatasetCompressionLevel; + quoteChar?: any; + escapeChar?: any; + firstRowAsHeader?: any; + nullValue?: any; +}; + +// @public +export type DelimitedTextReadSettings = FormatReadSettings & { + type: "DelimitedTextReadSettings"; + skipLineCount?: any; +}; + +// @public +export type DelimitedTextSink = CopySink & { + type: "DelimitedTextSink"; + storeSettings?: StoreWriteSettingsUnion; + formatSettings?: DelimitedTextWriteSettings; +}; + +// @public +export type DelimitedTextSource = CopySource & { + type: "DelimitedTextSource"; + storeSettings?: StoreReadSettingsUnion; + formatSettings?: DelimitedTextReadSettings; +}; + +// @public +export type DelimitedTextWriteSettings = FormatWriteSettings & { + type: "DelimitedTextWriteSettings"; + quoteAllText?: any; + fileExtension: any; +}; + +// @public +export type DependencyCondition = string; + +// @public +export interface DependencyReference { + type: "TriggerDependencyReference" | 
"TumblingWindowTriggerDependencyReference" | "SelfDependencyTumblingWindowTriggerReference"; +} + +// @public (undocumented) +export type DependencyReferenceUnion = DependencyReference | TriggerDependencyReferenceUnion | SelfDependencyTumblingWindowTriggerReference; + +// @public +export interface DistcpSettings { + distcpOptions?: any; + resourceManagerEndpoint: any; + tempScriptPath: any; +} + +// @public +export type DocumentDbCollectionDataset = Dataset & { + type: "DocumentDbCollection"; + collectionName: any; +}; + +// @public +export type DocumentDbCollectionSink = CopySink & { + type: "DocumentDbCollectionSink"; + nestingSeparator?: any; + writeBehavior?: any; +}; + +// @public +export type DocumentDbCollectionSource = CopySource & { + type: "DocumentDbCollectionSource"; + query?: any; + nestingSeparator?: any; + queryTimeout?: any; +}; + +// @public +export type DrillLinkedService = LinkedService & { + type: "Drill"; + connectionString?: any; + pwd?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type DrillSource = TabularSource & { + type: "DrillSource"; + query?: any; +}; + +// @public +export type DrillTableDataset = Dataset & { + type: "DrillTable"; + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export interface DWCopyCommandDefaultValue { + columnName?: any; + defaultValue?: any; +} + +// @public +export interface DWCopyCommandSettings { + additionalOptions?: { + [propertyName: string]: string; + }; + defaultValues?: DWCopyCommandDefaultValue[]; +} + +// @public +export type DynamicsAuthenticationType = string; + +// @public +export type DynamicsAXLinkedService = LinkedService & { + type: "DynamicsAX"; + url: any; + servicePrincipalId: any; + servicePrincipalKey: SecretBaseUnion; + tenant: any; + aadResourceId: any; + encryptedCredential?: any; +}; + +// @public +export type DynamicsAXResourceDataset = Dataset & { + type: "DynamicsAXResource"; + path: any; +}; + +// @public +export type DynamicsAXSource = TabularSource & { + type: "DynamicsAXSource"; + query?: any; +}; + +// @public +export type DynamicsCrmEntityDataset = Dataset & { + type: "DynamicsCrmEntity"; + entityName?: any; +}; + +// @public +export type DynamicsCrmLinkedService = LinkedService & { + type: "DynamicsCrm"; + deploymentType: DynamicsDeploymentType; + hostName?: any; + port?: any; + serviceUri?: any; + organizationName?: any; + authenticationType: DynamicsAuthenticationType; + username?: any; + password?: SecretBaseUnion; + servicePrincipalId?: any; + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + servicePrincipalCredential?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type DynamicsCrmSink = CopySink & { + type: "DynamicsCrmSink"; + writeBehavior: DynamicsSinkWriteBehavior; + ignoreNullValues?: any; + alternateKeyName?: any; +}; + +// @public +export type DynamicsCrmSource = CopySource & { + type: "DynamicsCrmSource"; + query?: any; +}; + +// @public +export type DynamicsDeploymentType = string; + +// @public +export type DynamicsEntityDataset = Dataset & { + type: "DynamicsEntity"; + entityName?: any; +}; + +// @public +export type DynamicsLinkedService = LinkedService & { + type: "Dynamics"; + deploymentType: DynamicsDeploymentType; + hostName?: string; + port?: string; + serviceUri?: string; + organizationName?: string; + authenticationType: DynamicsAuthenticationType; + username?: any; + password?: SecretBaseUnion; + servicePrincipalId?: any; + 
servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + servicePrincipalCredential?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type DynamicsServicePrincipalCredentialType = string; + +// @public +export type DynamicsSink = CopySink & { + type: "DynamicsSink"; + writeBehavior: DynamicsSinkWriteBehavior; + ignoreNullValues?: any; + alternateKeyName?: any; +}; + +// @public +export type DynamicsSinkWriteBehavior = string; + +// @public +export type DynamicsSource = CopySource & { + type: "DynamicsSource"; + query?: any; +}; + +// @public +export type EloquaLinkedService = LinkedService & { + type: "Eloqua"; + endpoint: any; + username: any; + password?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type EloquaObjectDataset = Dataset & { + type: "EloquaObject"; + tableName?: any; +}; + +// @public +export type EloquaSource = TabularSource & { + type: "EloquaSource"; + query?: any; +}; + +// @public +export interface EncryptionDetails { + cmk?: CustomerManagedKeyDetails; + readonly doubleEncryptionEnabled?: boolean; +} + +// @public +export interface EntityReference { + referenceName?: string; + type?: IntegrationRuntimeEntityReferenceType; +} + +// @public +export interface ErrorAdditionalInfo { + readonly info?: any; + readonly type?: string; +} + +// @public +export interface ErrorContract { + error?: ErrorResponse; +} + +// @public +export interface ErrorResponse { + readonly additionalInfo?: ErrorAdditionalInfo[]; + readonly code?: string; + readonly details?: ErrorResponse[]; + readonly message?: string; + readonly target?: string; +} + +// @public +export interface EvaluateDataFlowExpressionRequest { + dataFlowName?: string; + expression?: string; + rowLimits?: number; + sessionId?: string; + streamName?: string; +} + +// @public +export type EventSubscriptionStatus = string; + +// @public +export type ExecuteDataFlowActivity = ExecutionActivity & { + type: "ExecuteDataFlow"; + dataFlow: DataFlowReference; + staging?: DataFlowStagingInfo; + integrationRuntime?: IntegrationRuntimeReference; + compute?: ExecuteDataFlowActivityTypePropertiesCompute; +}; + +// @public +export interface ExecuteDataFlowActivityTypePropertiesCompute { + computeType?: DataFlowComputeType; + coreCount?: number; +} + +// @public +export type ExecutePipelineActivity = Activity & { + type: "ExecutePipeline"; + pipeline: PipelineReference; + parameters?: { + [propertyName: string]: any; + }; + waitOnCompletion?: boolean; +}; + +// @public +export type ExecuteSsisPackageActivity = ExecutionActivity & { + type: "ExecuteSSISPackage"; + packageLocation: SsisPackageLocation; + runtime?: any; + loggingLevel?: any; + environmentPath?: any; + executionCredential?: SsisExecutionCredential; + connectVia: IntegrationRuntimeReference; + projectParameters?: { + [propertyName: string]: SsisExecutionParameter; + }; + packageParameters?: { + [propertyName: string]: SsisExecutionParameter; + }; + projectConnectionManagers?: { + [propertyName: string]: any; + }; + packageConnectionManagers?: { + [propertyName: string]: any; + }; + propertyOverrides?: { + [propertyName: string]: SsisPropertyOverride; + }; + logLocation?: SsisLogLocation; +}; + +// @public +export type ExecutionActivity = Activity & { + type: "Execution"; + linkedServiceName?: LinkedServiceReference; + policy?: ActivityPolicy; +}; + +// @public (undocumented) +export type ExecutionActivityUnion = 
ExecutionActivity | CopyActivity | HDInsightHiveActivity | HDInsightPigActivity | HDInsightMapReduceActivity | HDInsightStreamingActivity | HDInsightSparkActivity | ExecuteSsisPackageActivity | CustomActivity | SqlServerStoredProcedureActivity | DeleteActivity | AzureDataExplorerCommandActivity | LookupActivity | WebActivity | GetMetadataActivity | AzureMLBatchExecutionActivity | AzureMLUpdateResourceActivity | AzureMLExecutePipelineActivity | DataLakeAnalyticsUsqlActivity | DatabricksNotebookActivity | DatabricksSparkJarActivity | DatabricksSparkPythonActivity | AzureFunctionActivity | ExecuteDataFlowActivity | SynapseNotebookActivity | SynapseSparkJobDefinitionActivity; + +// @public +export interface ExposureControlRequest { + featureName?: string; + featureType?: string; +} + +// @public +export interface ExposureControlResponse { + readonly featureName?: string; + readonly value?: string; +} + +// @public +export interface Expression { + type: ExpressionType; + value: string; +} + +// @public +export type ExpressionType = string; + +// @public +export type FileServerLinkedService = LinkedService & { + type: "FileServer"; + host: any; + userId?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type FileServerLocation = DatasetLocation & { + type: "FileServerLocation"; +}; + +// @public +export type FileServerReadSettings = StoreReadSettings & { + type: "FileServerReadSettings"; + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + enablePartitionDiscovery?: boolean; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type FileServerWriteSettings = StoreWriteSettings & { + type: "FileServerWriteSettings"; +}; + +// @public +export type FileSystemSink = CopySink & { + type: "FileSystemSink"; + copyBehavior?: any; +}; + +// @public +export type FileSystemSource = CopySource & { + type: "FileSystemSource"; + recursive?: any; +}; + +// @public +export type FilterActivity = Activity & { + type: "Filter"; + items: Expression; + condition: Expression; +}; + +// @public (undocumented) +export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; + +// @public +export type ForEachActivity = Activity & { + type: "ForEach"; + isSequential?: boolean; + batchCount?: number; + items: Expression; + activities: ActivityUnion[]; +}; + +// @public +export interface FormatReadSettings { + [property: string]: any; + type: "DelimitedTextReadSettings"; +} + +// @public (undocumented) +export type FormatReadSettingsUnion = FormatReadSettings | DelimitedTextReadSettings; + +// @public +export interface FormatWriteSettings { + [property: string]: any; + type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings"; +} + +// @public (undocumented) +export type FormatWriteSettingsUnion = FormatWriteSettings | AvroWriteSettings | DelimitedTextWriteSettings | JsonWriteSettings; + +// @public +export type FtpAuthenticationType = string; + +// @public +export type FtpReadSettings = StoreReadSettings & { + type: "FtpReadSettings"; + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + useBinaryTransfer?: boolean; +}; + +// @public +export type FtpServerLinkedService = LinkedService & { + type: "FtpServer"; + host: any; + port?: any; + authenticationType?: FtpAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; + enableSsl?: any; + enableServerCertificateValidation?: any; +}; + +// @public +export type FtpServerLocation = 
+// @public
+export type FtpServerLocation = DatasetLocation & {
+    type: "FtpServerLocation";
+};
+
+// @public
+export type GetMetadataActivity = ExecutionActivity & {
+    type: "GetMetadata";
+    dataset: DatasetReference;
+    fieldList?: any[];
+};
+
+// @public
+export interface GetSsisObjectMetadataRequest {
+    metadataPath?: string;
+}
+
+// @public (undocumented)
+export interface GitHubAccessTokenRequest {
+    gitHubAccessCode: string;
+    gitHubAccessTokenBaseUrl: string;
+    gitHubClientId: string;
+}
+
+// @public (undocumented)
+export interface GitHubAccessTokenResponse {
+    // (undocumented)
+    gitHubAccessToken?: string;
+}
+
+// @public
+export type GoogleAdWordsAuthenticationType = string;
+
+// @public
+export type GoogleAdWordsLinkedService = LinkedService & {
+    type: "GoogleAdWords";
+    clientCustomerID: any;
+    developerToken: SecretBaseUnion;
+    authenticationType: GoogleAdWordsAuthenticationType;
+    refreshToken?: SecretBaseUnion;
+    clientId?: any;
+    clientSecret?: SecretBaseUnion;
+    email?: any;
+    keyFilePath?: any;
+    trustedCertPath?: any;
+    useSystemTrustStore?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type GoogleAdWordsObjectDataset = Dataset & {
+    type: "GoogleAdWordsObject";
+    tableName?: any;
+};
+
+// @public
+export type GoogleAdWordsSource = TabularSource & {
+    type: "GoogleAdWordsSource";
+    query?: any;
+};
+
+// @public
+export type GoogleBigQueryAuthenticationType = string;
+
+// @public
+export type GoogleBigQueryLinkedService = LinkedService & {
+    type: "GoogleBigQuery";
+    project: any;
+    additionalProjects?: any;
+    requestGoogleDriveScope?: any;
+    authenticationType: GoogleBigQueryAuthenticationType;
+    refreshToken?: SecretBaseUnion;
+    clientId?: any;
+    clientSecret?: SecretBaseUnion;
+    email?: any;
+    keyFilePath?: any;
+    trustedCertPath?: any;
+    useSystemTrustStore?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type GoogleBigQueryObjectDataset = Dataset & {
+    type: "GoogleBigQueryObject";
+    tableName?: any;
+    table?: any;
+    dataset?: any;
+};
+
+// @public
+export type GoogleBigQuerySource = TabularSource & {
+    type: "GoogleBigQuerySource";
+    query?: any;
+};
+
+// @public
+export type GoogleCloudStorageLinkedService = LinkedService & {
+    type: "GoogleCloudStorage";
+    accessKeyId?: any;
+    secretAccessKey?: SecretBaseUnion;
+    serviceUrl?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type GoogleCloudStorageLocation = DatasetLocation & {
+    type: "GoogleCloudStorageLocation";
+    bucketName?: any;
+    version?: any;
+};
+
+// @public
+export type GoogleCloudStorageReadSettings = StoreReadSettings & {
+    type: "GoogleCloudStorageReadSettings";
+    recursive?: any;
+    wildcardFolderPath?: any;
+    wildcardFileName?: any;
+    prefix?: any;
+    enablePartitionDiscovery?: boolean;
+    modifiedDatetimeStart?: any;
+    modifiedDatetimeEnd?: any;
+};
+
+// @public
+export type GreenplumLinkedService = LinkedService & {
+    type: "Greenplum";
+    connectionString?: any;
+    pwd?: AzureKeyVaultSecretReference;
+    encryptedCredential?: any;
+};
+
+// @public
+export type GreenplumSource = TabularSource & {
+    type: "GreenplumSource";
+    query?: any;
+};
+
+// @public
+export type GreenplumTableDataset = Dataset & {
+    type: "GreenplumTable";
+    tableName?: any;
+    table?: any;
+    schemaTypePropertiesSchema?: any;
+};
+
+// @public
+export type HBaseAuthenticationType = string;
+
+// @public
+export type HBaseLinkedService = LinkedService & {
+    type: "HBase";
+    host: any;
+    port?: any;
+    httpPath?: any;
+    authenticationType: HBaseAuthenticationType;
+    username?: any;
+    password?: SecretBaseUnion;
+    enableSsl?: any;
+    trustedCertPath?: any;
+    allowHostNameCNMismatch?: any;
+    allowSelfSignedServerCert?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type HBaseObjectDataset = Dataset & {
+    type: "HBaseObject";
+    tableName?: any;
+};
+
+// @public
+export type HBaseSource = TabularSource & {
+    type: "HBaseSource";
+    query?: any;
+};
+
+// @public
+export type HdfsLinkedService = LinkedService & {
+    type: "Hdfs";
+    url: any;
+    authenticationType?: any;
+    encryptedCredential?: any;
+    userName?: any;
+    password?: SecretBaseUnion;
+};
+
+// @public
+export type HdfsLocation = DatasetLocation & {
+    type: "HdfsLocation";
+};
+
+// @public
+export type HdfsReadSettings = StoreReadSettings & {
+    type: "HdfsReadSettings";
+    recursive?: any;
+    wildcardFolderPath?: any;
+    wildcardFileName?: any;
+    enablePartitionDiscovery?: boolean;
+    modifiedDatetimeStart?: any;
+    modifiedDatetimeEnd?: any;
+    distcpSettings?: DistcpSettings;
+};
+
+// @public
+export type HdfsSource = CopySource & {
+    type: "HdfsSource";
+    recursive?: any;
+    distcpSettings?: DistcpSettings;
+};
+
+// @public
+export type HdiNodeTypes = string;
+
+// @public
+export type HDInsightActivityDebugInfoOption = string;
+
+// @public
+export type HDInsightHiveActivity = ExecutionActivity & {
+    type: "HDInsightHive";
+    storageLinkedServices?: LinkedServiceReference[];
+    arguments?: any[];
+    getDebugInfo?: HDInsightActivityDebugInfoOption;
+    scriptPath?: any;
+    scriptLinkedService?: LinkedServiceReference;
+    defines?: {
+        [propertyName: string]: any;
+    };
+    variables?: any[];
+    queryTimeout?: number;
+};
+
+// @public
+export type HDInsightLinkedService = LinkedService & {
+    type: "HDInsight";
+    clusterUri: any;
+    userName?: any;
+    password?: SecretBaseUnion;
+    linkedServiceName?: LinkedServiceReference;
+    hcatalogLinkedServiceName?: LinkedServiceReference;
+    encryptedCredential?: any;
+    isEspEnabled?: any;
+    fileSystem?: any;
+};
+
+// @public
+export type HDInsightMapReduceActivity = ExecutionActivity & {
+    type: "HDInsightMapReduce";
+    storageLinkedServices?: LinkedServiceReference[];
+    arguments?: any[];
+    getDebugInfo?: HDInsightActivityDebugInfoOption;
+    className: any;
+    jarFilePath: any;
+    jarLinkedService?: LinkedServiceReference;
+    jarLibs?: any[];
+    defines?: {
+        [propertyName: string]: any;
+    };
+};
+
+// @public
+export type HDInsightOnDemandLinkedService = LinkedService & {
+    type: "HDInsightOnDemand";
+    clusterSize: any;
+    timeToLive: any;
+    version: any;
+    linkedServiceName: LinkedServiceReference;
+    hostSubscriptionId: any;
+    servicePrincipalId?: any;
+    servicePrincipalKey?: SecretBaseUnion;
+    tenant: any;
+    clusterResourceGroup: any;
+    clusterNamePrefix?: any;
+    clusterUserName?: any;
+    clusterPassword?: SecretBaseUnion;
+    clusterSshUserName?: any;
+    clusterSshPassword?: SecretBaseUnion;
+    additionalLinkedServiceNames?: LinkedServiceReference[];
+    hcatalogLinkedServiceName?: LinkedServiceReference;
+    clusterType?: any;
+    sparkVersion?: any;
+    coreConfiguration?: any;
+    hBaseConfiguration?: any;
+    hdfsConfiguration?: any;
+    hiveConfiguration?: any;
+    mapReduceConfiguration?: any;
+    oozieConfiguration?: any;
+    stormConfiguration?: any;
+    yarnConfiguration?: any;
+    encryptedCredential?: any;
+    headNodeSize?: any;
+    dataNodeSize?: any;
+    zookeeperNodeSize?: any;
+    scriptActions?: ScriptAction[];
+    virtualNetworkId?: any;
+    subnetName?: any;
+};
+
+// @public
+export type HDInsightPigActivity = ExecutionActivity & {
+    type: "HDInsightPig";
+    storageLinkedServices?: LinkedServiceReference[];
+    arguments?: any;
+    getDebugInfo?: HDInsightActivityDebugInfoOption;
+    scriptPath?: any;
+    scriptLinkedService?: LinkedServiceReference;
+    defines?: {
+        [propertyName: string]: any;
+    };
+};
+
+// @public
+export type HDInsightSparkActivity = ExecutionActivity & {
+    type: "HDInsightSpark";
+    rootPath: any;
+    entryFilePath: any;
+    arguments?: any[];
+    getDebugInfo?: HDInsightActivityDebugInfoOption;
+    sparkJobLinkedService?: LinkedServiceReference;
+    className?: string;
+    proxyUser?: any;
+    sparkConfig?: {
+        [propertyName: string]: any;
+    };
+};
+
+// @public
+export type HDInsightStreamingActivity = ExecutionActivity & {
+    type: "HDInsightStreaming";
+    storageLinkedServices?: LinkedServiceReference[];
+    arguments?: any[];
+    getDebugInfo?: HDInsightActivityDebugInfoOption;
+    mapper: any;
+    reducer: any;
+    input: any;
+    output: any;
+    filePaths: any[];
+    fileLinkedService?: LinkedServiceReference;
+    combiner?: any;
+    commandEnvironment?: any[];
+    defines?: {
+        [propertyName: string]: any;
+    };
+};
+
+// @public
+export type HiveAuthenticationType = string;
+
+// @public
+export type HiveLinkedService = LinkedService & {
+    type: "Hive";
+    host: any;
+    port?: any;
+    serverType?: HiveServerType;
+    thriftTransportProtocol?: HiveThriftTransportProtocol;
+    authenticationType: HiveAuthenticationType;
+    serviceDiscoveryMode?: any;
+    zooKeeperNameSpace?: any;
+    useNativeQuery?: any;
+    username?: any;
+    password?: SecretBaseUnion;
+    httpPath?: any;
+    enableSsl?: any;
+    trustedCertPath?: any;
+    useSystemTrustStore?: any;
+    allowHostNameCNMismatch?: any;
+    allowSelfSignedServerCert?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type HiveObjectDataset = Dataset & {
+    type: "HiveObject";
+    tableName?: any;
+    table?: any;
+    schemaTypePropertiesSchema?: any;
+};
+
+// @public
+export type HiveServerType = string;
+
+// @public
+export type HiveSource = TabularSource & {
+    type: "HiveSource";
+    query?: any;
+};
+
+// @public
+export type HiveThriftTransportProtocol = string;
+
+// @public
+export type HttpAuthenticationType = string;
+
+// @public
+export type HttpLinkedService = LinkedService & {
+    type: "HttpServer";
+    url: any;
+    authenticationType?: HttpAuthenticationType;
+    userName?: any;
+    password?: SecretBaseUnion;
+    embeddedCertData?: any;
+    certThumbprint?: any;
+    encryptedCredential?: any;
+    enableServerCertificateValidation?: any;
+};
+
+// @public
+export type HttpReadSettings = StoreReadSettings & {
+    type: "HttpReadSettings";
+    requestMethod?: any;
+    requestBody?: any;
+    additionalHeaders?: any;
+    requestTimeout?: any;
+};
+
+// @public
+export type HttpServerLocation = DatasetLocation & {
+    type: "HttpServerLocation";
+    relativeUrl?: any;
+};
+
+// @public
+export type HttpSource = CopySource & {
+    type: "HttpSource";
+    httpRequestTimeout?: any;
+};
+
+// @public
+export type HubspotLinkedService = LinkedService & {
+    type: "Hubspot";
+    clientId: any;
+    clientSecret?: SecretBaseUnion;
+    accessToken?: SecretBaseUnion;
+    refreshToken?: SecretBaseUnion;
+    useEncryptedEndpoints?: any;
+    useHostVerification?: any;
+    usePeerVerification?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type HubspotObjectDataset = Dataset & {
+    type: "HubspotObject";
+    tableName?: any;
+};
+
+// @public
+export type HubspotSource = TabularSource & {
+    type: "HubspotSource";
+    query?: any;
+};
+
+// @public
+export type IfConditionActivity = Activity & {
+    type: "IfCondition";
+    expression: Expression;
+    ifTrueActivities?: ActivityUnion[];
+    ifFalseActivities?: ActivityUnion[];
+};
+
+// @public
+export type ImpalaAuthenticationType = string;
+
+// @public
+export type ImpalaLinkedService = LinkedService & {
+    type: "Impala";
+    host: any;
+    port?: any;
+    authenticationType: ImpalaAuthenticationType;
+    username?: any;
+    password?: SecretBaseUnion;
+    enableSsl?: any;
+    trustedCertPath?: any;
+    useSystemTrustStore?: any;
+    allowHostNameCNMismatch?: any;
+    allowSelfSignedServerCert?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type ImpalaObjectDataset = Dataset & {
+    type: "ImpalaObject";
+    tableName?: any;
+    table?: any;
+    schemaTypePropertiesSchema?: any;
+};
+
+// @public
+export type ImpalaSource = TabularSource & {
+    type: "ImpalaSource";
+    query?: any;
+};
+
+// @public
+export type InformixLinkedService = LinkedService & {
+    type: "Informix";
+    connectionString: any;
+    authenticationType?: any;
+    credential?: SecretBaseUnion;
+    userName?: any;
+    password?: SecretBaseUnion;
+    encryptedCredential?: any;
+};
+
+// @public
+export type InformixSink = CopySink & {
+    type: "InformixSink";
+    preCopyScript?: any;
+};
+
+// @public
+export type InformixSource = TabularSource & {
+    type: "InformixSource";
+    query?: any;
+};
+
+// @public
+export type InformixTableDataset = Dataset & {
+    type: "InformixTable";
+    tableName?: any;
+};
+
+// @public
+export interface IntegrationRuntime {
+    [property: string]: any;
+    description?: string;
+    type: "Managed" | "SelfHosted";
+}
+
+// @public
+export interface IntegrationRuntimeComputeProperties {
+    [property: string]: any;
+    dataFlowProperties?: IntegrationRuntimeDataFlowProperties;
+    location?: string;
+    maxParallelExecutionsPerNode?: number;
+    nodeSize?: string;
+    numberOfNodes?: number;
+    vNetProperties?: IntegrationRuntimeVNetProperties;
+}
+
+// @public
+export interface IntegrationRuntimeCustomSetupScriptProperties {
+    blobContainerUri?: string;
+    sasToken?: SecureString;
+}
+
+// @public
+export interface IntegrationRuntimeDataFlowProperties {
+    [property: string]: any;
+    computeType?: DataFlowComputeType;
+    coreCount?: number;
+    timeToLive?: number;
+}
+
+// @public
+export interface IntegrationRuntimeDataProxyProperties {
+    connectVia?: EntityReference;
+    path?: string;
+    stagingLinkedService?: EntityReference;
+}
+
+// @public
+export type IntegrationRuntimeEdition = string;
+
+// @public
+export type IntegrationRuntimeEntityReferenceType = string;
+
+// @public
+export type IntegrationRuntimeLicenseType = string;
+
+// @public
+export interface IntegrationRuntimeListResponse {
+    nextLink?: string;
+    value: IntegrationRuntimeResource[];
+}
+
+// @public
+export interface IntegrationRuntimeReference {
+    parameters?: {
+        [propertyName: string]: any;
+    };
+    referenceName: string;
+    type: IntegrationRuntimeReferenceType;
+}
+
+// @public
+export type IntegrationRuntimeReferenceType = string;
+
+// @public
+export type IntegrationRuntimeResource = AzureEntityResource & {
+    properties: IntegrationRuntimeUnion;
+};
+
+// @public
+export type IntegrationRuntimesGetResponse = IntegrationRuntimeResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: IntegrationRuntimeResource;
+    };
+};
+
+// @public
+export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: IntegrationRuntimeListResponse;
+    };
+};
+
+// @public
+export class IntegrationRuntimesOperation {
+    constructor(client: ArtifactsClient);
+    get(integrationRuntimeName: string, options?: coreHttp.OperationOptions): Promise<IntegrationRuntimesGetResponse>;
+    list(options?: coreHttp.OperationOptions): Promise<IntegrationRuntimesListResponse>;
+}
+
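Editor's note: `IntegrationRuntimesOperation` returns the `*Response` intersection types above, i.e. the resource body plus a `_response` carrying the raw payload. A hedged usage sketch; the constructor argument order and the `integrationRuntimes` client property are assumptions about the generated `ArtifactsClient`, and the endpoint is illustrative:

```ts
import { DefaultAzureCredential } from "@azure/identity";
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function main(): Promise<void> {
  // Assumed (credential, endpoint) order for the generated client.
  const client = new ArtifactsClient(
    new DefaultAzureCredential(),
    "https://myworkspace.dev.azuresynapse.net"
  );
  const ir = await client.integrationRuntimes.get("AutoResolveIntegrationRuntime");
  console.log(ir.properties.type); // "Managed" | "SelfHosted"
}

main();
```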
+// @public
+export interface IntegrationRuntimeSsisCatalogInfo {
+    [property: string]: any;
+    catalogAdminPassword?: SecureString;
+    catalogAdminUserName?: string;
+    catalogPricingTier?: IntegrationRuntimeSsisCatalogPricingTier;
+    catalogServerEndpoint?: string;
+}
+
+// @public
+export type IntegrationRuntimeSsisCatalogPricingTier = string;
+
+// @public
+export interface IntegrationRuntimeSsisProperties {
+    [property: string]: any;
+    catalogInfo?: IntegrationRuntimeSsisCatalogInfo;
+    customSetupScriptProperties?: IntegrationRuntimeCustomSetupScriptProperties;
+    dataProxyProperties?: IntegrationRuntimeDataProxyProperties;
+    edition?: IntegrationRuntimeEdition;
+    expressCustomSetupProperties?: CustomSetupBase[];
+    licenseType?: IntegrationRuntimeLicenseType;
+}
+
+// @public
+export type IntegrationRuntimeState = string;
+
+// @public
+export type IntegrationRuntimeType = string;
+
+// @public (undocumented)
+export type IntegrationRuntimeUnion = IntegrationRuntime | ManagedIntegrationRuntime | SelfHostedIntegrationRuntime;
+
+// @public
+export interface IntegrationRuntimeVNetProperties {
+    [property: string]: any;
+    publicIPs?: string[];
+    subnet?: string;
+    vNetId?: string;
+}
+
+// @public
+export type JiraLinkedService = LinkedService & {
+    type: "Jira";
+    host: any;
+    port?: any;
+    username: any;
+    password?: SecretBaseUnion;
+    useEncryptedEndpoints?: any;
+    useHostVerification?: any;
+    usePeerVerification?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type JiraObjectDataset = Dataset & {
+    type: "JiraObject";
+    tableName?: any;
+};
+
+// @public
+export type JiraSource = TabularSource & {
+    type: "JiraSource";
+    query?: any;
+};
+
+// @public
+export type JsonDataset = Dataset & {
+    type: "Json";
+    location?: DatasetLocationUnion;
+    encodingName?: any;
+    compression?: DatasetCompressionUnion;
+};
+
+// @public
+export type JsonFormat = DatasetStorageFormat & {
+    type: "JsonFormat";
+    filePattern?: JsonFormatFilePattern;
+    nestingSeparator?: any;
+    encodingName?: any;
+    jsonNodeReference?: any;
+    jsonPathDefinition?: any;
+};
+
+// @public
+export type JsonFormatFilePattern = string;
+
+// @public
+export type JsonSink = CopySink & {
+    type: "JsonSink";
+    storeSettings?: StoreWriteSettingsUnion;
+    formatSettings?: JsonWriteSettings;
+};
+
+// @public
+export type JsonSource = CopySource & {
+    type: "JsonSource";
+    storeSettings?: StoreReadSettingsUnion;
+};
+
+// @public
+export type JsonWriteFilePattern = string;
+
+// @public
+export type JsonWriteSettings = FormatWriteSettings & {
+    type: "JsonWriteSettings";
+    filePattern?: JsonWriteFilePattern;
+};
+
+// @public
+export const enum KnownAvroCompressionCodec {
+    // (undocumented)
+    Bzip2 = "bzip2",
+    // (undocumented)
+    Deflate = "deflate",
+    // (undocumented)
+    None = "none",
+    // (undocumented)
+    Snappy = "snappy",
+    // (undocumented)
+    Xz = "xz"
+}
+
+// @public
+export const enum KnownAzureFunctionActivityMethod {
+    // (undocumented)
+    Delete = "DELETE",
+    // (undocumented)
+    GET = "GET",
+    // (undocumented)
+    Head = "HEAD",
+    // (undocumented)
+    Options = "OPTIONS",
+    // (undocumented)
+    Post = "POST",
+    // (undocumented)
+    PUT = "PUT",
+    // (undocumented)
+    Trace = "TRACE"
+}
+
+// @public
+export const enum KnownAzureSearchIndexWriteBehaviorType {
+    // (undocumented)
+    Merge = "Merge",
+    // (undocumented)
+    Upload = "Upload"
+}
+
+// @public
+export const enum KnownBigDataPoolReferenceType {
+    // (undocumented)
+    BigDataPoolReference = "BigDataPoolReference"
+}
+
+// @public
+export const enum KnownBlobEventType {
+    // (undocumented)
+    MicrosoftStorageBlobCreated = "Microsoft.Storage.BlobCreated",
+    // (undocumented)
+    MicrosoftStorageBlobDeleted = "Microsoft.Storage.BlobDeleted"
+}
+
+// @public
+export const enum KnownCassandraSourceReadConsistencyLevels {
+    // (undocumented)
+    ALL = "ALL",
+    // (undocumented)
+    EachQuorum = "EACH_QUORUM",
+    // (undocumented)
+    LocalONE = "LOCAL_ONE",
+    // (undocumented)
+    LocalQuorum = "LOCAL_QUORUM",
+    // (undocumented)
+    LocalSerial = "LOCAL_SERIAL",
+    // (undocumented)
+    ONE = "ONE",
+    // (undocumented)
+    Quorum = "QUORUM",
+    // (undocumented)
+    Serial = "SERIAL",
+    // (undocumented)
+    Three = "THREE",
+    // (undocumented)
+    TWO = "TWO"
+}
+
+// @public
+export const enum KnownCellOutputType {
+    // (undocumented)
+    DisplayData = "display_data",
+    // (undocumented)
+    Error = "error",
+    // (undocumented)
+    ExecuteResult = "execute_result",
+    // (undocumented)
+    Stream = "stream"
+}
+
+// @public
+export const enum KnownCopyBehaviorType {
+    // (undocumented)
+    FlattenHierarchy = "FlattenHierarchy",
+    // (undocumented)
+    MergeFiles = "MergeFiles",
+    // (undocumented)
+    PreserveHierarchy = "PreserveHierarchy"
+}
+
+// @public
+export const enum KnownDataFlowComputeType {
+    // (undocumented)
+    ComputeOptimized = "ComputeOptimized",
+    // (undocumented)
+    General = "General",
+    // (undocumented)
+    MemoryOptimized = "MemoryOptimized"
+}
+
+// @public
+export const enum KnownDataFlowReferenceType {
+    // (undocumented)
+    DataFlowReference = "DataFlowReference"
+}
+
+// @public
+export const enum KnownDatasetCompressionLevel {
+    // (undocumented)
+    Fastest = "Fastest",
+    // (undocumented)
+    Optimal = "Optimal"
+}
+
+// @public
+export const enum KnownDatasetReferenceType {
+    // (undocumented)
+    DatasetReference = "DatasetReference"
+}
+
+// @public
+export const enum KnownDb2AuthenticationType {
+    // (undocumented)
+    Basic = "Basic"
+}
+
+// @public
+export const enum KnownDelimitedTextCompressionCodec {
+    // (undocumented)
+    Bzip2 = "bzip2",
+    // (undocumented)
+    Deflate = "deflate",
+    // (undocumented)
+    Gzip = "gzip",
+    // (undocumented)
+    Lz4 = "lz4",
+    // (undocumented)
+    Snappy = "snappy",
+    // (undocumented)
+    ZipDeflate = "zipDeflate"
+}
+
+// @public
+export const enum KnownDependencyCondition {
+    // (undocumented)
+    Completed = "Completed",
+    // (undocumented)
+    Failed = "Failed",
+    // (undocumented)
+    Skipped = "Skipped",
+    // (undocumented)
+    Succeeded = "Succeeded"
+}
+
+// @public
+export const enum KnownDynamicsAuthenticationType {
+    // (undocumented)
+    AADServicePrincipal = "AADServicePrincipal",
+    // (undocumented)
+    Ifd = "Ifd",
+    // (undocumented)
+    Office365 = "Office365"
+}
+
+// @public
+export const enum KnownDynamicsDeploymentType {
+    // (undocumented)
+    Online = "Online",
+    // (undocumented)
+    OnPremisesWithIfd = "OnPremisesWithIfd"
+}
+
+// @public
+export const enum KnownDynamicsServicePrincipalCredentialType {
+    // (undocumented)
+    ServicePrincipalCert = "ServicePrincipalCert",
+    // (undocumented)
+    ServicePrincipalKey = "ServicePrincipalKey"
+}
+
+// @public
+export const enum KnownDynamicsSinkWriteBehavior {
+    // (undocumented)
+    Upsert = "Upsert"
+}
+
+// @public
+export const enum KnownEventSubscriptionStatus {
+    // (undocumented)
+    Deprovisioning = "Deprovisioning",
+    // (undocumented)
+    Disabled = "Disabled",
+    // (undocumented)
+    Enabled = "Enabled",
+    // (undocumented)
+    Provisioning = "Provisioning",
+    // (undocumented)
+    Unknown = "Unknown"
+}
+
+// @public
+export const enum KnownExpressionType {
+    // (undocumented)
+    Expression = "Expression"
+}
+
+// @public
+export const enum KnownFtpAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Basic = "Basic"
+}
+
+// @public
+export const enum KnownGoogleAdWordsAuthenticationType {
+    // (undocumented)
+    ServiceAuthentication = "ServiceAuthentication",
+    // (undocumented)
+    UserAuthentication = "UserAuthentication"
+}
+
+// @public
+export const enum KnownGoogleBigQueryAuthenticationType {
+    // (undocumented)
+    ServiceAuthentication = "ServiceAuthentication",
+    // (undocumented)
+    UserAuthentication = "UserAuthentication"
+}
+
+// @public
+export const enum KnownHBaseAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Basic = "Basic"
+}
+
+// @public
+export const enum KnownHdiNodeTypes {
+    // (undocumented)
+    Headnode = "Headnode",
+    // (undocumented)
+    Workernode = "Workernode",
+    // (undocumented)
+    Zookeeper = "Zookeeper"
+}
+
+// @public
+export const enum KnownHDInsightActivityDebugInfoOption {
+    // (undocumented)
+    Always = "Always",
+    // (undocumented)
+    Failure = "Failure",
+    // (undocumented)
+    None = "None"
+}
+
+// @public
+export const enum KnownHiveAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Username = "Username",
+    // (undocumented)
+    UsernameAndPassword = "UsernameAndPassword",
+    // (undocumented)
+    WindowsAzureHDInsightService = "WindowsAzureHDInsightService"
+}
+
+// @public
+export const enum KnownHiveServerType {
+    // (undocumented)
+    HiveServer1 = "HiveServer1",
+    // (undocumented)
+    HiveServer2 = "HiveServer2",
+    // (undocumented)
+    HiveThriftServer = "HiveThriftServer"
+}
+
+// @public
+export const enum KnownHiveThriftTransportProtocol {
+    // (undocumented)
+    Binary = "Binary",
+    // (undocumented)
+    Http = "HTTP ",
+    // (undocumented)
+    Sasl = "SASL"
+}
+
+// @public
+export const enum KnownHttpAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    ClientCertificate = "ClientCertificate",
+    // (undocumented)
+    Digest = "Digest",
+    // (undocumented)
+    Windows = "Windows"
+}
+
+// @public
+export const enum KnownImpalaAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    SaslUsername = "SASLUsername",
+    // (undocumented)
+    UsernameAndPassword = "UsernameAndPassword"
+}
+
+// @public
+export const enum KnownIntegrationRuntimeEdition {
+    // (undocumented)
+    Enterprise = "Enterprise",
+    // (undocumented)
+    Standard = "Standard"
+}
+
+// @public
+export const enum KnownIntegrationRuntimeEntityReferenceType {
+    // (undocumented)
+    IntegrationRuntimeReference = "IntegrationRuntimeReference",
+    // (undocumented)
+    LinkedServiceReference = "LinkedServiceReference"
+}
+
+// @public
+export const enum KnownIntegrationRuntimeLicenseType {
+    // (undocumented)
+    BasePrice = "BasePrice",
+    // (undocumented)
+    LicenseIncluded = "LicenseIncluded"
+}
+
+// @public
+export const enum KnownIntegrationRuntimeReferenceType {
+    // (undocumented)
+    IntegrationRuntimeReference = "IntegrationRuntimeReference"
+}
+
+// @public
+export const enum KnownIntegrationRuntimeSsisCatalogPricingTier {
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    Premium = "Premium",
+    // (undocumented)
+    PremiumRS = "PremiumRS",
+    // (undocumented)
+    Standard = "Standard"
+}
+
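Editor's note: each `Known*` const enum pairs with a plain `string` alias declared earlier in the report (for example `IntegrationRuntimeEdition = string`). This is the extensible-enum pattern used by these generated clients: known values get names and completion, but any string is accepted. A small sketch:

```ts
import {
  IntegrationRuntimeEdition,
  KnownIntegrationRuntimeEdition,
} from "@azure/synapse-artifacts";

// A known value, discoverable through the const enum...
const standard: IntegrationRuntimeEdition = KnownIntegrationRuntimeEdition.Standard;

// ...but the alias is just `string`, so values added by the service later still type-check.
const future: IntegrationRuntimeEdition = "SomeEditionAddedLater";
```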
+// @public
+export const enum KnownIntegrationRuntimeState {
+    // (undocumented)
+    AccessDenied = "AccessDenied",
+    // (undocumented)
+    Initial = "Initial",
+    // (undocumented)
+    Limited = "Limited",
+    // (undocumented)
+    NeedRegistration = "NeedRegistration",
+    // (undocumented)
+    Offline = "Offline",
+    // (undocumented)
+    Online = "Online",
+    // (undocumented)
+    Started = "Started",
+    // (undocumented)
+    Starting = "Starting",
+    // (undocumented)
+    Stopped = "Stopped",
+    // (undocumented)
+    Stopping = "Stopping"
+}
+
+// @public
+export const enum KnownIntegrationRuntimeType {
+    // (undocumented)
+    Managed = "Managed",
+    // (undocumented)
+    SelfHosted = "SelfHosted"
+}
+
+// @public
+export const enum KnownJsonFormatFilePattern {
+    // (undocumented)
+    ArrayOfObjects = "arrayOfObjects",
+    // (undocumented)
+    SetOfObjects = "setOfObjects"
+}
+
+// @public
+export const enum KnownJsonWriteFilePattern {
+    // (undocumented)
+    ArrayOfObjects = "arrayOfObjects",
+    // (undocumented)
+    SetOfObjects = "setOfObjects"
+}
+
+// @public
+export const enum KnownMongoDbAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Basic = "Basic"
+}
+
+// @public
+export const enum KnownNetezzaPartitionOption {
+    // (undocumented)
+    DataSlice = "DataSlice",
+    // (undocumented)
+    DynamicRange = "DynamicRange",
+    // (undocumented)
+    None = "None"
+}
+
+// @public
+export const enum KnownNodeSize {
+    // (undocumented)
+    Large = "Large",
+    // (undocumented)
+    Medium = "Medium",
+    // (undocumented)
+    None = "None",
+    // (undocumented)
+    Small = "Small",
+    // (undocumented)
+    XLarge = "XLarge",
+    // (undocumented)
+    XXLarge = "XXLarge",
+    // (undocumented)
+    XXXLarge = "XXXLarge"
+}
+
+// @public
+export const enum KnownNodeSizeFamily {
+    // (undocumented)
+    MemoryOptimized = "MemoryOptimized",
+    // (undocumented)
+    None = "None"
+}
+
+// @public
+export const enum KnownNotebookReferenceType {
+    // (undocumented)
+    NotebookReference = "NotebookReference"
+}
+
+// @public
+export const enum KnownODataAadServicePrincipalCredentialType {
+    // (undocumented)
+    ServicePrincipalCert = "ServicePrincipalCert",
+    // (undocumented)
+    ServicePrincipalKey = "ServicePrincipalKey"
+}
+
+// @public
+export const enum KnownODataAuthenticationType {
+    // (undocumented)
+    AadServicePrincipal = "AadServicePrincipal",
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    ManagedServiceIdentity = "ManagedServiceIdentity",
+    // (undocumented)
+    Windows = "Windows"
+}
+
+// @public
+export const enum KnownOraclePartitionOption {
+    // (undocumented)
+    DynamicRange = "DynamicRange",
+    // (undocumented)
+    None = "None",
+    // (undocumented)
+    PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable"
+}
+
+// @public
+export const enum KnownOrcCompressionCodec {
+    // (undocumented)
+    None = "none",
+    // (undocumented)
+    Snappy = "snappy",
+    // (undocumented)
+    Zlib = "zlib"
+}
+
+// @public
+export const enum KnownParameterType {
+    // (undocumented)
+    Array = "Array",
+    // (undocumented)
+    Bool = "Bool",
+    // (undocumented)
+    Float = "Float",
+    // (undocumented)
+    Int = "Int",
+    // (undocumented)
+    Object = "Object",
+    // (undocumented)
+    SecureString = "SecureString",
+    // (undocumented)
+    String = "String"
+}
+
+// @public
+export const enum KnownParquetCompressionCodec {
+    // (undocumented)
+    Gzip = "gzip",
+    // (undocumented)
+    Lzo = "lzo",
+    // (undocumented)
+    None = "none",
+    // (undocumented)
+    Snappy = "snappy"
+}
+
+// @public
+export const enum KnownPhoenixAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    UsernameAndPassword = "UsernameAndPassword",
+    // (undocumented)
+    WindowsAzureHDInsightService = "WindowsAzureHDInsightService"
+}
+
+// @public
+export const enum KnownPipelineReferenceType {
+    // (undocumented)
+    PipelineReference = "PipelineReference"
+}
+
+// @public
+export const enum KnownPluginCurrentState {
+    // (undocumented)
+    Cleanup = "Cleanup",
+    // (undocumented)
+    Ended = "Ended",
+    // (undocumented)
+    Monitoring = "Monitoring",
+    // (undocumented)
+    Preparation = "Preparation",
+    // (undocumented)
+    Queued = "Queued",
+    // (undocumented)
+    ResourceAcquisition = "ResourceAcquisition",
+    // (undocumented)
+    Submission = "Submission"
+}
+
+// @public
+export const enum KnownPolybaseSettingsRejectType {
+    // (undocumented)
+    Percentage = "percentage",
+    // (undocumented)
+    Value = "value"
+}
+
+// @public
+export const enum KnownPrestoAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Ldap = "LDAP"
+}
+
+// @public
+export const enum KnownRecurrenceFrequency {
+    // (undocumented)
+    Day = "Day",
+    // (undocumented)
+    Hour = "Hour",
+    // (undocumented)
+    Minute = "Minute",
+    // (undocumented)
+    Month = "Month",
+    // (undocumented)
+    NotSpecified = "NotSpecified",
+    // (undocumented)
+    Week = "Week",
+    // (undocumented)
+    Year = "Year"
+}
+
+// @public
+export const enum KnownRestServiceAuthenticationType {
+    // (undocumented)
+    AadServicePrincipal = "AadServicePrincipal",
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    ManagedServiceIdentity = "ManagedServiceIdentity"
+}
+
+// @public
+export const enum KnownRunQueryFilterOperand {
+    // (undocumented)
+    ActivityName = "ActivityName",
+    // (undocumented)
+    ActivityRunEnd = "ActivityRunEnd",
+    // (undocumented)
+    ActivityRunStart = "ActivityRunStart",
+    // (undocumented)
+    ActivityType = "ActivityType",
+    // (undocumented)
+    LatestOnly = "LatestOnly",
+    // (undocumented)
+    PipelineName = "PipelineName",
+    // (undocumented)
+    RunEnd = "RunEnd",
+    // (undocumented)
+    RunGroupId = "RunGroupId",
+    // (undocumented)
+    RunStart = "RunStart",
+    // (undocumented)
+    Status = "Status",
+    // (undocumented)
+    TriggerName = "TriggerName",
+    // (undocumented)
+    TriggerRunTimestamp = "TriggerRunTimestamp"
+}
+
+// @public
+export const enum KnownRunQueryFilterOperator {
+    // (undocumented)
+    Equals = "Equals",
+    // (undocumented)
+    In = "In",
+    // (undocumented)
+    NotEquals = "NotEquals",
+    // (undocumented)
+    NotIn = "NotIn"
+}
+
+// @public
+export const enum KnownRunQueryOrder {
+    // (undocumented)
+    ASC = "ASC",
+    // (undocumented)
+    Desc = "DESC"
+}
+
+// @public
+export const enum KnownRunQueryOrderByField {
+    // (undocumented)
+    ActivityName = "ActivityName",
+    // (undocumented)
+    ActivityRunEnd = "ActivityRunEnd",
+    // (undocumented)
+    ActivityRunStart = "ActivityRunStart",
+    // (undocumented)
+    PipelineName = "PipelineName",
+    // (undocumented)
+    RunEnd = "RunEnd",
+    // (undocumented)
+    RunStart = "RunStart",
+    // (undocumented)
+    Status = "Status",
+    // (undocumented)
+    TriggerName = "TriggerName",
+    // (undocumented)
+    TriggerRunTimestamp = "TriggerRunTimestamp"
+}
+
+// @public
+export const enum KnownSalesforceSinkWriteBehavior {
+    // (undocumented)
+    Insert = "Insert",
+    // (undocumented)
+    Upsert = "Upsert"
+}
+
+// @public
+export const enum KnownSalesforceSourceReadBehavior {
+    // (undocumented)
+    Query = "Query",
+    // (undocumented)
+    QueryAll = "QueryAll"
+}
+
+// @public
+export const enum KnownSapCloudForCustomerSinkWriteBehavior {
+    // (undocumented)
+    Insert = "Insert",
+    // (undocumented)
+    Update = "Update"
+}
+
+// @public
+export const enum KnownSapHanaAuthenticationType {
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    Windows = "Windows"
+}
+
+// @public
+export const enum KnownSapHanaPartitionOption {
+    // (undocumented)
+    None = "None",
+    // (undocumented)
+    PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable",
+    // (undocumented)
+    SapHanaDynamicRange = "SapHanaDynamicRange"
+}
+
+// @public
+export const enum KnownSapTablePartitionOption {
+    // (undocumented)
+    None = "None",
+    // (undocumented)
+    PartitionOnCalendarDate = "PartitionOnCalendarDate",
+    // (undocumented)
+    PartitionOnCalendarMonth = "PartitionOnCalendarMonth",
+    // (undocumented)
+    PartitionOnCalendarYear = "PartitionOnCalendarYear",
+    // (undocumented)
+    PartitionOnInt = "PartitionOnInt",
+    // (undocumented)
+    PartitionOnTime = "PartitionOnTime"
+}
+
+// @public
+export const enum KnownSchedulerCurrentState {
+    // (undocumented)
+    Ended = "Ended",
+    // (undocumented)
+    Queued = "Queued",
+    // (undocumented)
+    Scheduled = "Scheduled"
+}
+
+// @public
+export const enum KnownServiceNowAuthenticationType {
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    OAuth2 = "OAuth2"
+}
+
+// @public
+export const enum KnownSftpAuthenticationType {
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    SshPublicKey = "SshPublicKey"
+}
+
+// @public
+export const enum KnownSparkAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Username = "Username",
+    // (undocumented)
+    UsernameAndPassword = "UsernameAndPassword",
+    // (undocumented)
+    WindowsAzureHDInsightService = "WindowsAzureHDInsightService"
+}
+
+// @public
+export const enum KnownSparkBatchJobResultType {
+    // (undocumented)
+    Cancelled = "Cancelled",
+    // (undocumented)
+    Failed = "Failed",
+    // (undocumented)
+    Succeeded = "Succeeded",
+    // (undocumented)
+    Uncertain = "Uncertain"
+}
+
+// @public
+export const enum KnownSparkErrorSource {
+    // (undocumented)
+    Dependency = "Dependency",
+    // (undocumented)
+    System = "System",
+    // (undocumented)
+    Unknown = "Unknown",
+    // (undocumented)
+    User = "User"
+}
+
+// @public
+export const enum KnownSparkJobReferenceType {
+    // (undocumented)
+    SparkJobDefinitionReference = "SparkJobDefinitionReference"
+}
+
+// @public
+export const enum KnownSparkJobType {
+    // (undocumented)
+    SparkBatch = "SparkBatch",
+    // (undocumented)
+    SparkSession = "SparkSession"
+}
+
+// @public
+export const enum KnownSparkServerType {
+    // (undocumented)
+    SharkServer = "SharkServer",
+    // (undocumented)
+    SharkServer2 = "SharkServer2",
+    // (undocumented)
+    SparkThriftServer = "SparkThriftServer"
+}
+
+// @public
+export const enum KnownSparkThriftTransportProtocol {
+    // (undocumented)
+    Binary = "Binary",
+    // (undocumented)
+    Http = "HTTP ",
+    // (undocumented)
+    Sasl = "SASL"
+}
+
+// @public
+export const enum KnownSqlConnectionType {
+    // (undocumented)
+    SqlOnDemand = "SqlOnDemand",
+    // (undocumented)
+    SqlPool = "SqlPool"
+}
+
+// @public
+export const enum KnownSqlPoolReferenceType {
+    // (undocumented)
+    SqlPoolReference = "SqlPoolReference"
+}
+
+// @public
+export const enum KnownSqlScriptType {
+    // (undocumented)
+    SqlQuery = "SqlQuery"
+}
+
+// @public
+export const enum KnownSsisLogLocationType {
+    // (undocumented)
+    File = "File"
+}
+
+// @public
+export const enum KnownSsisPackageLocationType {
+    // (undocumented)
+    File = "File",
+    // (undocumented)
+    InlinePackage = "InlinePackage",
+    // (undocumented)
+    Ssisdb = "SSISDB"
+}
+
+// @public
+export const enum KnownStoredProcedureParameterType {
+    // (undocumented)
+    Boolean = "Boolean",
+    // (undocumented)
+    Date = "Date",
+    // (undocumented)
+    Decimal = "Decimal",
+    // (undocumented)
+    Guid = "Guid",
+    // (undocumented)
+    Int = "Int",
+    // (undocumented)
+    Int64 = "Int64",
+    // (undocumented)
+    String = "String"
+}
+
+// @public
+export const enum KnownSybaseAuthenticationType {
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    Windows = "Windows"
+}
+
+// @public
+export const enum KnownTeradataAuthenticationType {
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    Windows = "Windows"
+}
+
+// @public
+export const enum KnownTeradataPartitionOption {
+    // (undocumented)
+    DynamicRange = "DynamicRange",
+    // (undocumented)
+    Hash = "Hash",
+    // (undocumented)
+    None = "None"
+}
+
+// @public
+export const enum KnownTriggerReferenceType {
+    // (undocumented)
+    TriggerReference = "TriggerReference"
+}
+
+// @public
+export const enum KnownTriggerRunStatus {
+    // (undocumented)
+    Failed = "Failed",
+    // (undocumented)
+    Inprogress = "Inprogress",
+    // (undocumented)
+    Succeeded = "Succeeded"
+}
+
+// @public
+export const enum KnownTriggerRuntimeState {
+    // (undocumented)
+    Disabled = "Disabled",
+    // (undocumented)
+    Started = "Started",
+    // (undocumented)
+    Stopped = "Stopped"
+}
+
+// @public
+export const enum KnownTumblingWindowFrequency {
+    // (undocumented)
+    Hour = "Hour",
+    // (undocumented)
+    Minute = "Minute"
+}
+
+// @public
+export const enum KnownType {
+    // (undocumented)
+    LinkedServiceReference = "LinkedServiceReference"
+}
+
+// @public
+export const enum KnownVariableType {
+    // (undocumented)
+    Array = "Array",
+    // (undocumented)
+    Bool = "Bool",
+    // (undocumented)
+    Boolean = "Boolean",
+    // (undocumented)
+    String = "String"
+}
+
+// @public
+export const enum KnownWebActivityMethod {
+    // (undocumented)
+    Delete = "DELETE",
+    // (undocumented)
+    GET = "GET",
+    // (undocumented)
+    Post = "POST",
+    // (undocumented)
+    PUT = "PUT"
+}
+
+// @public
+export const enum KnownWebAuthenticationType {
+    // (undocumented)
+    Anonymous = "Anonymous",
+    // (undocumented)
+    Basic = "Basic",
+    // (undocumented)
+    ClientCertificate = "ClientCertificate"
+}
+
+// @public
+export const enum KnownWebHookActivityMethod {
+    // (undocumented)
+    Post = "POST"
+}
+
+// @public
+export interface LibraryRequirements {
+    content?: string;
+    filename?: string;
+    readonly time?: Date;
+}
+
+// @public
+export type LinkedIntegrationRuntimeKeyAuthorization = LinkedIntegrationRuntimeType & {
+    authorizationType: "Key";
+    key: SecureString;
+};
+
+// @public
+export type LinkedIntegrationRuntimeRbacAuthorization = LinkedIntegrationRuntimeType & {
+    authorizationType: "RBAC";
+    resourceId: string;
+};
+
+// @public
+export interface LinkedIntegrationRuntimeType {
+    authorizationType: "Key" | "RBAC";
+}
+
+// @public (undocumented)
+export type LinkedIntegrationRuntimeTypeUnion = LinkedIntegrationRuntimeType | LinkedIntegrationRuntimeKeyAuthorization | LinkedIntegrationRuntimeRbacAuthorization;
+
+// @public
+export interface LinkedService {
+    [property: string]: any;
+    annotations?: any[];
+    connectVia?: IntegrationRuntimeReference;
+    description?: string;
+    parameters?: {
+        [propertyName: string]: ParameterSpecification;
+    };
+    type: "AzureStorage" | "AzureBlobStorage" | "AzureTableStorage" | "AzureSqlDW" | "SqlServer" | "AzureSqlDatabase" | "AzureSqlMI" | "AzureBatch" | "AzureKeyVault" | "CosmosDb" | "Dynamics" | "DynamicsCrm" | "CommonDataServiceForApps" | "HDInsight" | "FileServer" | "AzureFileStorage" | "GoogleCloudStorage" | "Oracle" | "AzureMySql" | "MySql" | "PostgreSql" | "Sybase" | "Db2" | "Teradata" | "AzureML" | "AzureMLService" | "Odbc" | "Informix" | "MicrosoftAccess" | "Hdfs" | "OData" | "Web" | "Cassandra" | "MongoDb" | "MongoDbV2" | "CosmosDbMongoDbApi" | "AzureDataLakeStore" | "AzureBlobFS" | "Office365" | "Salesforce" | "SalesforceServiceCloud" | "SapCloudForCustomer" | "SapEcc" | "SapOpenHub" | "RestService" | "AmazonS3" | "AmazonRedshift" | "CustomDataSource" | "AzureSearch" | "HttpServer" | "FtpServer" | "Sftp" | "SapBW" | "SapHana" | "AmazonMWS" | "AzurePostgreSql" | "Concur" | "Couchbase" | "Drill" | "Eloqua" | "GoogleBigQuery" | "Greenplum" | "HBase" | "Hive" | "Hubspot" | "Impala" | "Jira" | "Magento" | "MariaDB" | "AzureMariaDB" | "Marketo" | "Paypal" | "Phoenix" | "Presto" | "QuickBooks" | "ServiceNow" | "Shopify" | "Spark" | "Square" | "Xero" | "Zoho" | "Vertica" | "Netezza" | "SalesforceMarketingCloud" | "HDInsightOnDemand" | "AzureDataLakeAnalytics" | "AzureDatabricks" | "Responsys" | "DynamicsAX" | "OracleServiceCloud" | "GoogleAdWords" | "SapTable" | "AzureDataExplorer" | "AzureFunction";
+}
+
+// @public
+export interface LinkedServiceCreateOrUpdateLinkedServiceOptionalParams extends coreHttp.OperationOptions {
+    ifMatch?: string;
+}
+
+// @public
+export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: LinkedServiceResource;
+        [LROSYM]: LROResponseInfo;
+    };
+};
+
+// @public
+export type LinkedServiceDebugResource = SubResourceDebugResource & {
+    properties: LinkedServiceUnion;
+};
+
+// @public
+export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions {
+    ifNoneMatch?: string;
+}
+
+// @public
+export type LinkedServiceGetLinkedServiceResponse = LinkedServiceResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: LinkedServiceResource;
+    };
+};
+
+// @public
+export type LinkedServiceGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: LinkedServiceListResponse;
+    };
+};
+
+// @public
+export type LinkedServiceGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: LinkedServiceListResponse;
+    };
+};
+
+// @public
+export interface LinkedServiceListResponse {
+    nextLink?: string;
+    value: LinkedServiceResource[];
+}
+
+// @public
+export class LinkedServiceOperation {
+    constructor(client: ArtifactsClient);
+    createOrUpdateLinkedService(linkedServiceName: string, linkedService: LinkedServiceResource, options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams): Promise<LROPoller<LinkedServiceCreateOrUpdateLinkedServiceResponse>>;
+    deleteLinkedService(linkedServiceName: string, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+    getLinkedService(linkedServiceName: string, options?: LinkedServiceGetLinkedServiceOptionalParams): Promise<LinkedServiceGetLinkedServiceResponse>;
+    listLinkedServicesByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator<LinkedServiceResource>;
+    renameLinkedService(linkedServiceName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+}
+
+// @public
+export interface LinkedServiceReference {
+    parameters?: {
+        [propertyName: string]: any;
+    };
+    referenceName: string;
+    type: Type;
+}
+
+// @public
+export type LinkedServiceResource = AzureEntityResource & {
+    properties: LinkedServiceUnion;
+};
+
+// @public (undocumented)
+export type LinkedServiceUnion = LinkedService | AzureStorageLinkedService | AzureBlobStorageLinkedService | AzureTableStorageLinkedService | AzureSqlDWLinkedService | SqlServerLinkedService | AzureSqlDatabaseLinkedService | AzureSqlMILinkedService | AzureBatchLinkedService | AzureKeyVaultLinkedService | CosmosDbLinkedService | DynamicsLinkedService | DynamicsCrmLinkedService | CommonDataServiceForAppsLinkedService | HDInsightLinkedService | FileServerLinkedService | AzureFileStorageLinkedService | GoogleCloudStorageLinkedService | OracleLinkedService | AzureMySqlLinkedService | MySqlLinkedService | PostgreSqlLinkedService | SybaseLinkedService | Db2LinkedService | TeradataLinkedService | AzureMLLinkedService | AzureMLServiceLinkedService | OdbcLinkedService | InformixLinkedService | MicrosoftAccessLinkedService | HdfsLinkedService | ODataLinkedService | WebLinkedService | CassandraLinkedService | MongoDbLinkedService | MongoDbV2LinkedService | CosmosDbMongoDbApiLinkedService | AzureDataLakeStoreLinkedService | AzureBlobFSLinkedService | Office365LinkedService | SalesforceLinkedService | SalesforceServiceCloudLinkedService | SapCloudForCustomerLinkedService | SapEccLinkedService | SapOpenHubLinkedService | RestServiceLinkedService | AmazonS3LinkedService | AmazonRedshiftLinkedService | CustomDataSourceLinkedService | AzureSearchLinkedService | HttpLinkedService | FtpServerLinkedService | SftpServerLinkedService | SapBWLinkedService | SapHanaLinkedService | AmazonMWSLinkedService | AzurePostgreSqlLinkedService | ConcurLinkedService | CouchbaseLinkedService | DrillLinkedService | EloquaLinkedService | GoogleBigQueryLinkedService | GreenplumLinkedService | HBaseLinkedService | HiveLinkedService | HubspotLinkedService | ImpalaLinkedService | JiraLinkedService | MagentoLinkedService | MariaDBLinkedService | AzureMariaDBLinkedService | MarketoLinkedService | PaypalLinkedService | PhoenixLinkedService | PrestoLinkedService | QuickBooksLinkedService | ServiceNowLinkedService | ShopifyLinkedService | SparkLinkedService | SquareLinkedService | XeroLinkedService | ZohoLinkedService | VerticaLinkedService | NetezzaLinkedService | SalesforceMarketingCloudLinkedService | HDInsightOnDemandLinkedService | AzureDataLakeAnalyticsLinkedService | AzureDatabricksLinkedService | ResponsysLinkedService | DynamicsAXLinkedService | OracleServiceCloudLinkedService | GoogleAdWordsLinkedService | SapTableLinkedService | AzureDataExplorerLinkedService | AzureFunctionLinkedService;
+
+// @public
+export interface LogStorageSettings {
+    [property: string]: any;
+    linkedServiceName: LinkedServiceReference;
+    path?: any;
+}
+
+// @public
+export type LookupActivity = ExecutionActivity & {
+    type: "Lookup";
+    source: CopySourceUnion;
+    dataset: DatasetReference;
+    firstRowOnly?: any;
+};
+
+// @public
+export type LROOperationResponse = HttpOperationResponse & {
+    [LROSYM]?: LROResponseInfo;
+};
+
+// @public (undocumented)
+export interface LROOperationState<TResult> extends PollOperationState<TResult> {
+    // (undocumented)
+    finalStateVia?: FinalStateVia;
+    // (undocumented)
+    initialOperation: LROOperationStep<TResult>;
+    // (undocumented)
+    lastOperation: LROOperationStep<TResult>;
+    // (undocumented)
+    pollingStrategy: LROStrategy<TResult>;
+}
+
+// @public (undocumented)
+export interface LROOperationStep<TResult> {
+    // (undocumented)
+    args: OperationArguments;
+    // (undocumented)
+    result: TResult;
+    // (undocumented)
+    spec: OperationSpec;
+}
+
+// @public (undocumented)
+export class LROPoller<TResult> extends Poller<LROOperationState<TResult>, TResult> {
+    constructor({ initialOperationArguments, initialOperationResult, initialOperationSpec, sendOperation, finalStateVia, intervalInMs }: LROPollerOptions<TResult>);
+    delay(): Promise<void>;
+}
+
+// @public (undocumented)
+export interface LROPollerOptions<TResult> {
+    finalStateVia?: FinalStateVia;
+    initialOperationArguments: OperationArguments;
+    initialOperationResult: TResult;
+    initialOperationSpec: OperationSpec;
+    intervalInMs?: number;
+    sendOperation: SendOperationFn<TResult>;
+}
+
+// @public (undocumented)
+export interface LROResponseInfo {
+    // (undocumented)
+    azureAsyncOperation?: string;
+    // (undocumented)
+    isInitialRequest?: boolean;
+    // (undocumented)
+    location?: string;
+    // (undocumented)
+    operationLocation?: string;
+    // (undocumented)
+    provisioningState?: string;
+    // (undocumented)
+    requestMethod: HttpMethods;
+    // (undocumented)
+    status?: string;
+    // (undocumented)
+    statusCode: number;
+}
+
+// @public (undocumented)
+export interface LROStrategy<TResult> {
+    // (undocumented)
+    isTerminal: () => boolean;
+    // (undocumented)
+    poll: () => Promise<LROOperationStep<TResult>>;
+    // (undocumented)
+    sendFinalRequest: () => Promise<LROOperationStep<TResult>>;
+}
+
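Editor's note: `LROPoller` extends `Poller` from @azure/core-lro, so long-running operations resolve to a poller that can be driven to completion with the inherited `pollUntilDone()`. A hedged sketch, assuming the `linkedService` property name used in the earlier example:

```ts
import { ArtifactsClient, LinkedServiceResource } from "@azure/synapse-artifacts";

async function upsertLinkedService(
  client: ArtifactsClient,
  name: string,
  body: LinkedServiceResource
): Promise<void> {
  const poller = await client.linkedService.createOrUpdateLinkedService(name, body);
  // pollUntilDone() comes from the core-lro Poller base class.
  const result = await poller.pollUntilDone();
  console.log(result.name, result.etag);
}
```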
+// @public
+export type MagentoLinkedService = LinkedService & {
+    type: "Magento";
+    host: any;
+    accessToken?: SecretBaseUnion;
+    useEncryptedEndpoints?: any;
+    useHostVerification?: any;
+    usePeerVerification?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type MagentoObjectDataset = Dataset & {
+    type: "MagentoObject";
+    tableName?: any;
+};
+
+// @public
+export type MagentoSource = TabularSource & {
+    type: "MagentoSource";
+    query?: any;
+};
+
+// @public
+export interface ManagedIdentity {
+    readonly principalId?: string;
+    readonly tenantId?: string;
+    type?: ResourceIdentityType;
+}
+
+// @public
+export type ManagedIntegrationRuntime = IntegrationRuntime & {
+    type: "Managed";
+    readonly state?: IntegrationRuntimeState;
+    computeProperties?: IntegrationRuntimeComputeProperties;
+    ssisProperties?: IntegrationRuntimeSsisProperties;
+};
+
+// @public
+export interface ManagedVirtualNetworkSettings {
+    allowedAadTenantIdsForLinking?: string[];
+    linkedAccessCheckOnTargetResource?: boolean;
+    preventDataExfiltration?: boolean;
+}
+
+// @public
+export type MappingDataFlow = DataFlow & {
+    type: "MappingDataFlow";
+    sources?: DataFlowSource[];
+    sinks?: DataFlowSink[];
+    transformations?: Transformation[];
+    script?: string;
+};
+
+// @public
+export type MariaDBLinkedService = LinkedService & {
+    type: "MariaDB";
+    connectionString?: any;
+    pwd?: AzureKeyVaultSecretReference;
+    encryptedCredential?: any;
+};
+
+// @public
+export type MariaDBSource = TabularSource & {
+    type: "MariaDBSource";
+    query?: any;
+};
+
+// @public
+export type MariaDBTableDataset = Dataset & {
+    type: "MariaDBTable";
+    tableName?: any;
+};
+
+// @public
+export type MarketoLinkedService = LinkedService & {
+    type: "Marketo";
+    endpoint: any;
+    clientId: any;
+    clientSecret?: SecretBaseUnion;
+    useEncryptedEndpoints?: any;
+    useHostVerification?: any;
+    usePeerVerification?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type MarketoObjectDataset = Dataset & {
+    type: "MarketoObject";
+    tableName?: any;
+};
+
+// @public
+export type MarketoSource = TabularSource & {
+    type: "MarketoSource";
+    query?: any;
+};
+
+// @public
+export type MicrosoftAccessLinkedService = LinkedService & {
+    type: "MicrosoftAccess";
+    connectionString: any;
+    authenticationType?: any;
+    credential?: SecretBaseUnion;
+    userName?: any;
+    password?: SecretBaseUnion;
+    encryptedCredential?: any;
+};
+
+// @public
+export type MicrosoftAccessSink = CopySink & {
+    type: "MicrosoftAccessSink";
+    preCopyScript?: any;
+};
+
+// @public
+export type MicrosoftAccessSource = CopySource & {
+    type: "MicrosoftAccessSource";
+    query?: any;
+};
+
+// @public
+export type MicrosoftAccessTableDataset = Dataset & {
+    type: "MicrosoftAccessTable";
+    tableName?: any;
+};
+
+// @public
+export type MongoDbAuthenticationType = string;
+
+// @public
+export type MongoDbCollectionDataset = Dataset & {
+    type: "MongoDbCollection";
+    collectionName: any;
+};
+
+// @public
+export interface MongoDbCursorMethodsProperties {
+    [property: string]: any;
+    limit?: any;
+    project?: any;
+    skip?: any;
+    sort?: any;
+}
+
+// @public
+export type MongoDbLinkedService = LinkedService & {
+    type: "MongoDb";
+    server: any;
+    authenticationType?: MongoDbAuthenticationType;
+    databaseName: any;
+    username?: any;
+    password?: SecretBaseUnion;
+    authSource?: any;
+    port?: any;
+    enableSsl?: any;
+    allowSelfSignedServerCert?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type MongoDbSource = CopySource & {
+    type: "MongoDbSource";
+    query?: any;
+};
+
+// @public
+export type MongoDbV2CollectionDataset = Dataset & {
+    type: "MongoDbV2Collection";
+    collection: any;
+};
+
+// @public
+export type MongoDbV2LinkedService = LinkedService & {
+    type: "MongoDbV2";
+    connectionString: any;
+    database: any;
+};
+
+// @public
+export type MongoDbV2Source = CopySource & {
+    type: "MongoDbV2Source";
+    filter?: any;
+    cursorMethods?: MongoDbCursorMethodsProperties;
+    batchSize?: any;
+    queryTimeout?: any;
+};
+
+// @public
+export type MultiplePipelineTrigger = Trigger & {
+    type: "MultiplePipelineTrigger";
+    pipelines?: TriggerPipelineReference[];
+};
+
+// @public (undocumented)
+export type MultiplePipelineTriggerUnion = MultiplePipelineTrigger | ScheduleTrigger | BlobTrigger | BlobEventsTrigger;
+
+// @public
+export type MySqlLinkedService = LinkedService & {
+    type: "MySql";
+    connectionString: any;
+    password?: AzureKeyVaultSecretReference;
+    encryptedCredential?: any;
+};
+
+// @public
+export type MySqlSource = TabularSource & {
+    type: "MySqlSource";
+    query?: any;
+};
+
+// @public
+export type MySqlTableDataset = Dataset & {
+    type: "MySqlTable";
+    tableName?: any;
+};
+
+// @public
+export type NetezzaLinkedService = LinkedService & {
+    type: "Netezza";
+    connectionString?: any;
+    pwd?: AzureKeyVaultSecretReference;
+    encryptedCredential?: any;
+};
+
+// @public
+export type NetezzaPartitionOption = string;
+
+// @public
+export interface NetezzaPartitionSettings {
+    partitionColumnName?: any;
+    partitionLowerBound?: any;
+    partitionUpperBound?: any;
+}
+
+// @public
+export type NetezzaSource = TabularSource & {
+    type: "NetezzaSource";
+    query?: any;
+    partitionOption?: NetezzaPartitionOption;
+    partitionSettings?: NetezzaPartitionSettings;
+};
+
+// @public
+export type NetezzaTableDataset = Dataset & {
+    type: "NetezzaTable";
+    tableName?: any;
+    table?: any;
+    schemaTypePropertiesSchema?: any;
+};
+
+// @public
+export type NodeSize = string;
+
+// @public
+export type NodeSizeFamily = string;
+
+// @public
+export interface Notebook {
+    [property: string]: any;
+    bigDataPool?: BigDataPoolReference | null;
+    cells: NotebookCell[];
+    description?: string;
+    metadata: NotebookMetadata;
+    nbformat: number;
+    nbformatMinor: number;
+    sessionProperties?: NotebookSessionProperties | null;
+}
+
+// @public
+export interface NotebookCell {
+    [property: string]: any;
+    attachments?: any;
+    cellType: string;
+    metadata: any;
+    outputs?: NotebookCellOutputItem[];
+    source: string[];
+}
+
+// @public
+export interface NotebookCellOutputItem {
+    data?: any;
+    executionCount?: number;
+    metadata?: any;
+    name?: string;
+    outputType: CellOutputType;
+    text?: any;
+}
+
+// @public
+export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions {
+    ifMatch?: string;
+}
+
+// @public
+export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: NotebookResource;
+        [LROSYM]: LROResponseInfo;
+    };
+};
+
+// @public
+export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions {
+    ifNoneMatch?: string;
+}
+
+// @public
+export type NotebookGetNotebookResponse = NotebookResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: NotebookResource;
+    };
+};
+
+// @public
+export type NotebookGetNotebooksByWorkspaceNextResponse = NotebookListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: NotebookListResponse;
+    };
+};
+
+// @public
+export type NotebookGetNotebooksByWorkspaceResponse = NotebookListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: NotebookListResponse;
+    };
+};
+
+// @public
+export type NotebookGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: NotebookListResponse;
+    };
+};
+
+// @public
+export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: NotebookListResponse;
+    };
+};
+
+// @public
+export interface NotebookKernelSpec {
+    [property: string]: any;
+    displayName: string;
+    name: string;
+}
+
+// @public
+export interface NotebookLanguageInfo {
+    [property: string]: any;
+    codemirrorMode?: string;
+    name: string;
+}
+
+// @public
+export interface NotebookListResponse {
+    nextLink?: string;
+    value: NotebookResource[];
+}
+
+// @public
+export interface NotebookMetadata {
+    [property: string]: any;
+    kernelspec?: NotebookKernelSpec;
+    languageInfo?: NotebookLanguageInfo;
+}
+
+// @public
+export class NotebookOperation {
+    constructor(client: ArtifactsClient);
+    createOrUpdateNotebook(notebookName: string, notebook: NotebookResource, options?: NotebookCreateOrUpdateNotebookOptionalParams): Promise<LROPoller<NotebookCreateOrUpdateNotebookResponse>>;
+    deleteNotebook(notebookName: string, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+    getNotebook(notebookName: string, options?: NotebookGetNotebookOptionalParams): Promise<NotebookGetNotebookResponse>;
+    listNotebooksByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator<NotebookResource>;
+    listNotebookSummaryByWorkSpace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator<NotebookResource>;
+    renameNotebook(notebookName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+}
+
+// @public
+export type NotebookReferenceType = string;
+
+// @public
+export interface NotebookResource {
+    readonly etag?: string;
+    readonly id?: string;
+    name: string;
+    properties: Notebook;
+    readonly type?: string;
+}
+
+// @public
+export interface NotebookSessionProperties {
+    driverCores: number;
+    driverMemory: string;
+    executorCores: number;
+    executorMemory: string;
+    numExecutors: number;
+}
+
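Editor's note: the `Notebook` interface mirrors the Jupyter nbformat shape; `cells`, `metadata`, `nbformat`, and `nbformatMinor` are required, everything else is optional. A minimal hedged literal (the source line is illustrative):

```ts
import { Notebook } from "@azure/synapse-artifacts";

const notebook: Notebook = {
  nbformat: 4,
  nbformatMinor: 2,
  metadata: { languageInfo: { name: "python" } },
  cells: [
    {
      cellType: "code",
      metadata: {},
      source: ["print('hello from Synapse')"],
      outputs: [],
    },
  ],
};
```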
+// @public
+export type ODataAadServicePrincipalCredentialType = string;
+
+// @public
+export type ODataAuthenticationType = string;
+
+// @public
+export type ODataLinkedService = LinkedService & {
+    type: "OData";
+    url: any;
+    authenticationType?: ODataAuthenticationType;
+    userName?: any;
+    password?: SecretBaseUnion;
+    tenant?: any;
+    servicePrincipalId?: any;
+    aadResourceId?: any;
+    aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType;
+    servicePrincipalKey?: SecretBaseUnion;
+    servicePrincipalEmbeddedCert?: SecretBaseUnion;
+    servicePrincipalEmbeddedCertPassword?: SecretBaseUnion;
+    encryptedCredential?: any;
+};
+
+// @public
+export type ODataResourceDataset = Dataset & {
+    type: "ODataResource";
+    path?: any;
+};
+
+// @public
+export type ODataSource = CopySource & {
+    type: "ODataSource";
+    query?: any;
+};
+
+// @public
+export type OdbcLinkedService = LinkedService & {
+    type: "Odbc";
+    connectionString: any;
+    authenticationType?: any;
+    credential?: SecretBaseUnion;
+    userName?: any;
+    password?: SecretBaseUnion;
+    encryptedCredential?: any;
+};
+
+// @public
+export type OdbcSink = CopySink & {
+    type: "OdbcSink";
+    preCopyScript?: any;
+};
+
+// @public
+export type OdbcSource = TabularSource & {
+    type: "OdbcSource";
+    query?: any;
+};
+
+// @public
+export type OdbcTableDataset = Dataset & {
+    type: "OdbcTable";
+    tableName?: any;
+};
+
+// @public
+export type Office365Dataset = Dataset & {
+    type: "Office365Table";
+    tableName: any;
+    predicate?: any;
+};
+
+// @public
+export type Office365LinkedService = LinkedService & {
+    type: "Office365";
+    office365TenantId: any;
+    servicePrincipalTenantId: any;
+    servicePrincipalId: any;
+    servicePrincipalKey: SecretBaseUnion;
+    encryptedCredential?: any;
+};
+
+// @public
+export type Office365Source = CopySource & {
+    type: "Office365Source";
+    allowedGroups?: any;
+    userScopeFilterUri?: any;
+    dateFilterColumn?: any;
+    startTime?: any;
+    endTime?: any;
+    outputColumns?: any;
+};
+
+// @public
+export type OracleLinkedService = LinkedService & {
+    type: "Oracle";
+    connectionString: any;
+    password?: AzureKeyVaultSecretReference;
+    encryptedCredential?: any;
+};
+
+// @public
+export type OraclePartitionOption = string;
+
+// @public
+export interface OraclePartitionSettings {
+    partitionColumnName?: any;
+    partitionLowerBound?: any;
+    partitionNames?: any;
+    partitionUpperBound?: any;
+}
+
+// @public
+export type OracleServiceCloudLinkedService = LinkedService & {
+    type: "OracleServiceCloud";
+    host: any;
+    username: any;
+    password: SecretBaseUnion;
+    useEncryptedEndpoints?: any;
+    useHostVerification?: any;
+    usePeerVerification?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type OracleServiceCloudObjectDataset = Dataset & {
+    type: "OracleServiceCloudObject";
+    tableName?: any;
+};
+
+// @public
+export type OracleServiceCloudSource = TabularSource & {
+    type: "OracleServiceCloudSource";
+    query?: any;
+};
+
+// @public
+export type OracleSink = CopySink & {
+    type: "OracleSink";
+    preCopyScript?: any;
+};
+
+// @public
+export type OracleSource = CopySource & {
+    type: "OracleSource";
+    oracleReaderQuery?: any;
+    queryTimeout?: any;
+    partitionOption?: OraclePartitionOption;
+    partitionSettings?: OraclePartitionSettings;
+};
+
+// @public
+export type OracleTableDataset = Dataset & {
+    type: "OracleTable";
+    tableName?: any;
+    schemaTypePropertiesSchema?: any;
+    table?: any;
+};
+
+// @public
+export type OrcCompressionCodec = string;
+
+// @public
+export type OrcDataset = Dataset & {
+    type: "Orc";
+    location?: DatasetLocationUnion;
+    orcCompressionCodec?: OrcCompressionCodec;
+};
+
+// @public
+export type OrcFormat = DatasetStorageFormat & {
+    type: "OrcFormat";
+};
+
+// @public
+export type OrcSink = CopySink & {
+    type: "OrcSink";
+    storeSettings?: StoreWriteSettingsUnion;
+};
+
+// @public
+export type OrcSource = CopySource & {
+    type: "OrcSource";
+    storeSettings?: StoreReadSettingsUnion;
+};
+
+// @public
+export interface ParameterSpecification {
+    defaultValue?: any;
+    type: ParameterType;
+}
+
+// @public
+export type ParameterType = string;
+
+// @public
+export type ParquetCompressionCodec = string;
+
+// @public
+export type ParquetDataset = Dataset & {
+    type: "Parquet";
+    location?: DatasetLocationUnion;
+    compressionCodec?: ParquetCompressionCodec;
+};
+
+// @public
+export type ParquetFormat = DatasetStorageFormat & {
+    type: "ParquetFormat";
+};
+
+// @public
+export type ParquetSink = CopySink & {
+    type: "ParquetSink";
+    storeSettings?: StoreWriteSettingsUnion;
+};
+
+// @public
+export type ParquetSource = CopySource & {
+    type: "ParquetSource";
+    storeSettings?: StoreReadSettingsUnion;
+};
+
+// @public
+export type PaypalLinkedService = LinkedService & {
+    type: "Paypal";
+    host: any;
+    clientId: any;
+    clientSecret?: SecretBaseUnion;
+    useEncryptedEndpoints?: any;
+    useHostVerification?: any;
+    usePeerVerification?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type PaypalObjectDataset = Dataset & {
+    type: "PaypalObject";
+    tableName?: any;
+};
+
+// @public
+export type PaypalSource = TabularSource & {
+    type: "PaypalSource";
+    query?: any;
+};
+
+// @public
+export type PhoenixAuthenticationType = string;
+
+// @public
+export type PhoenixLinkedService = LinkedService & {
+    type: "Phoenix";
+    host: any;
+    port?: any;
+    httpPath?: any;
+    authenticationType: PhoenixAuthenticationType;
+    username?: any;
+    password?: SecretBaseUnion;
+    enableSsl?: any;
+    trustedCertPath?: any;
+    useSystemTrustStore?: any;
+    allowHostNameCNMismatch?: any;
+    allowSelfSignedServerCert?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type PhoenixObjectDataset = Dataset & {
+    type: "PhoenixObject";
+    tableName?: any;
+    table?: any;
+    schemaTypePropertiesSchema?: any;
+};
+
+// @public
+export type PhoenixSource = TabularSource & {
+    type: "PhoenixSource";
+    query?: any;
+};
+
+// @public
+export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions {
+    ifMatch?: string;
+}
+
+// @public
+export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: PipelineResource;
+        [LROSYM]: LROResponseInfo;
+    };
+};
+
+// @public
+export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions {
+    isRecovery?: boolean;
+    parameters?: {
+        [propertyName: string]: any;
+    };
+    referencePipelineRunId?: string;
+    startActivityName?: string;
+}
+
+// @public
+export type PipelineCreatePipelineRunResponse = CreateRunResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: CreateRunResponse;
+    };
+};
+
+// @public
+export interface PipelineFolder {
+    name?: string;
+}
+
+// @public
+export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions {
+    ifNoneMatch?: string;
+}
+
+// @public
+export type PipelineGetPipelineResponse = PipelineResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: PipelineResource;
+    };
+};
+
PipelineListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: PipelineListResponse;
+    };
+};
+
+// @public
+export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: PipelineListResponse;
+    };
+};
+
+// @public
+export interface PipelineListResponse {
+    nextLink?: string;
+    value: PipelineResource[];
+}
+
+// @public
+export class PipelineOperation {
+    constructor(client: ArtifactsClient);
+    createOrUpdatePipeline(pipelineName: string, pipeline: PipelineResource, options?: PipelineCreateOrUpdatePipelineOptionalParams): Promise<LROPoller<PipelineCreateOrUpdatePipelineResponse>>;
+    createPipelineRun(pipelineName: string, options?: PipelineCreatePipelineRunOptionalParams): Promise<PipelineCreatePipelineRunResponse>;
+    deletePipeline(pipelineName: string, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+    getPipeline(pipelineName: string, options?: PipelineGetPipelineOptionalParams): Promise<PipelineGetPipelineResponse>;
+    listPipelinesByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator<PipelineResource>;
+    renamePipeline(pipelineName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+}
+
+// @public
+export interface PipelineReference {
+    name?: string;
+    referenceName: string;
+    type: PipelineReferenceType;
+}
+
+// @public
+export type PipelineReferenceType = string;
+
+// @public
+export type PipelineResource = AzureEntityResource & {
+    [property: string]: any;
+    description?: string;
+    activities?: ActivityUnion[];
+    parameters?: {
+        [propertyName: string]: ParameterSpecification;
+    };
+    variables?: {
+        [propertyName: string]: VariableSpecification;
+    };
+    concurrency?: number;
+    annotations?: any[];
+    runDimensions?: {
+        [propertyName: string]: any;
+    };
+    folder?: PipelineFolder;
+};
+
+// @public
+export interface PipelineRun {
+    [property: string]: any;
+    readonly durationInMs?: number;
+    readonly invokedBy?: PipelineRunInvokedBy;
+    readonly isLatest?: boolean;
+    readonly lastUpdated?: Date;
+    readonly message?: string;
+    readonly parameters?: {
+        [propertyName: string]: string;
+    };
+    readonly pipelineName?: string;
+    readonly runEnd?: Date;
+    readonly runGroupId?: string;
+    readonly runId?: string;
+    readonly runStart?: Date;
+    readonly status?: string;
+}
+
+// @public
+export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions {
+    isRecursive?: boolean;
+}
+
+// @public
+export type PipelineRunGetPipelineRunResponse = PipelineRun & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: PipelineRun;
+    };
+};
+
+// @public
+export interface PipelineRunInvokedBy {
+    readonly id?: string;
+    readonly invokedByType?: string;
+    readonly name?: string;
+}
+
+// @public
+export class PipelineRunOperation {
+    constructor(client: ArtifactsClient);
+    cancelPipelineRun(runId: string, options?: PipelineRunCancelPipelineRunOptionalParams): Promise<coreHttp.RestResponse>;
+    getPipelineRun(runId: string, options?: coreHttp.OperationOptions): Promise<PipelineRunGetPipelineRunResponse>;
+    queryActivityRuns(pipelineName: string, runId: string, filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions): Promise<PipelineRunQueryActivityRunsResponse>;
+    queryPipelineRunsByWorkspace(filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions): Promise<PipelineRunQueryPipelineRunsByWorkspaceResponse>;
+}
+
+// @public
+export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: ActivityRunsQueryResponse;
+    };
+};
+
+// @public
+export type PipelineRunQueryPipelineRunsByWorkspaceResponse = 
PipelineRunsQueryResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: PipelineRunsQueryResponse; + }; +}; + +// @public +export interface PipelineRunsQueryResponse { + continuationToken?: string; + value: PipelineRun[]; +} + +// @public +export type PluginCurrentState = string; + +// @public +export interface PolybaseSettings { + [property: string]: any; + rejectSampleValue?: any; + rejectType?: PolybaseSettingsRejectType; + rejectValue?: any; + useTypeDefault?: any; +} + +// @public +export type PolybaseSettingsRejectType = string; + +// @public +export type PostgreSqlLinkedService = LinkedService & { + type: "PostgreSql"; + connectionString: any; + password?: AzureKeyVaultSecretReference; + encryptedCredential?: any; +}; + +// @public +export type PostgreSqlSource = TabularSource & { + type: "PostgreSqlSource"; + query?: any; +}; + +// @public +export type PostgreSqlTableDataset = Dataset & { + type: "PostgreSqlTable"; + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type PrestoAuthenticationType = string; + +// @public +export type PrestoLinkedService = LinkedService & { + type: "Presto"; + host: any; + serverVersion: any; + catalog: any; + port?: any; + authenticationType: PrestoAuthenticationType; + username?: any; + password?: SecretBaseUnion; + enableSsl?: any; + trustedCertPath?: any; + useSystemTrustStore?: any; + allowHostNameCNMismatch?: any; + allowSelfSignedServerCert?: any; + timeZoneID?: any; + encryptedCredential?: any; +}; + +// @public +export type PrestoObjectDataset = Dataset & { + type: "PrestoObject"; + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export type PrestoSource = TabularSource & { + type: "PrestoSource"; + query?: any; +}; + +// @public +export interface PrivateEndpoint { + readonly id?: string; +} + +// @public +export type PrivateEndpointConnection = Resource & { + privateEndpoint?: PrivateEndpoint; + privateLinkServiceConnectionState?: PrivateLinkServiceConnectionState; + readonly provisioningState?: string; +}; + +// @public +export interface PrivateLinkServiceConnectionState { + readonly actionsRequired?: string; + description?: string; + status?: string; +} + +// @public +export type ProxyResource = Resource & {}; + +// @public +export interface PurviewConfiguration { + purviewResourceId?: string; +} + +// @public +export interface QueryDataFlowDebugSessionsResponse { + nextLink?: string; + value?: DataFlowDebugSessionInfo[]; +} + +// @public +export type QuickBooksLinkedService = LinkedService & { + type: "QuickBooks"; + endpoint: any; + companyId: any; + consumerKey: any; + consumerSecret: SecretBaseUnion; + accessToken: SecretBaseUnion; + accessTokenSecret: SecretBaseUnion; + useEncryptedEndpoints?: any; + encryptedCredential?: any; +}; + +// @public +export type QuickBooksObjectDataset = Dataset & { + type: "QuickBooksObject"; + tableName?: any; +}; + +// @public +export type QuickBooksSource = TabularSource & { + type: "QuickBooksSource"; + query?: any; +}; + +// @public +export type RecurrenceFrequency = string; + +// @public +export interface RecurrenceSchedule { + [property: string]: any; + hours?: number[]; + minutes?: number[]; + monthDays?: number[]; + monthlyOccurrences?: RecurrenceScheduleOccurrence[]; + weekDays?: DayOfWeek[]; +} + +// @public +export interface RecurrenceScheduleOccurrence { + [property: string]: any; + day?: DayOfWeek; + occurrence?: number; +} + +// @public +export interface 
RedirectIncompatibleRowSettings { + [property: string]: any; + linkedServiceName: any; + path?: any; +} + +// @public +export interface RedshiftUnloadSettings { + bucketName: any; + s3LinkedServiceName: LinkedServiceReference; +} + +// @public +export type RelationalSource = CopySource & { + type: "RelationalSource"; + query?: any; +}; + +// @public +export type RelationalTableDataset = Dataset & { + type: "RelationalTable"; + tableName?: any; +}; + +// @public +export interface RerunTriggerListResponse { + readonly nextLink?: string; + value: RerunTriggerResource[]; +} + +// @public +export type RerunTriggerResource = AzureEntityResource & { + properties: RerunTumblingWindowTrigger; +}; + +// @public +export type RerunTumblingWindowTrigger = Trigger & { + type: "RerunTumblingWindowTrigger"; + parentTrigger?: any; + requestedStartTime: Date; + requestedEndTime: Date; + maxConcurrency: number; +}; + +// @public +export interface RerunTumblingWindowTriggerActionParameters { + endTime: Date; + maxConcurrency: number; + startTime: Date; +} + +// @public +export interface Resource { + readonly id?: string; + readonly name?: string; + readonly type?: string; +} + +// @public +export type ResourceIdentityType = "None" | "SystemAssigned"; + +// @public +export type ResponsysLinkedService = LinkedService & { + type: "Responsys"; + endpoint: any; + clientId: any; + clientSecret?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type ResponsysObjectDataset = Dataset & { + type: "ResponsysObject"; + tableName?: any; +}; + +// @public +export type ResponsysSource = TabularSource & { + type: "ResponsysSource"; + query?: any; +}; + +// @public +export type RestResourceDataset = Dataset & { + type: "RestResource"; + relativeUrl?: any; + requestMethod?: any; + requestBody?: any; + additionalHeaders?: any; + paginationRules?: any; +}; + +// @public +export type RestServiceAuthenticationType = string; + +// @public +export type RestServiceLinkedService = LinkedService & { + type: "RestService"; + url: any; + enableServerCertificateValidation?: any; + authenticationType: RestServiceAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + servicePrincipalId?: any; + servicePrincipalKey?: SecretBaseUnion; + tenant?: any; + aadResourceId?: any; + encryptedCredential?: any; +}; + +// @public +export type RestSource = CopySource & { + type: "RestSource"; + requestMethod?: any; + requestBody?: any; + additionalHeaders?: any; + paginationRules?: any; + httpRequestTimeout?: any; + requestInterval?: any; +}; + +// @public +export interface RetryPolicy { + count?: any; + intervalInSeconds?: number; +} + +// @public +export interface RunFilterParameters { + continuationToken?: string; + filters?: RunQueryFilter[]; + lastUpdatedAfter: Date; + lastUpdatedBefore: Date; + orderBy?: RunQueryOrderBy[]; +} + +// @public +export interface RunQueryFilter { + operand: RunQueryFilterOperand; + operator: RunQueryFilterOperator; + values: string[]; +} + +// @public +export type RunQueryFilterOperand = string; + +// @public +export type RunQueryFilterOperator = string; + +// @public +export type RunQueryOrder = string; + +// @public +export interface RunQueryOrderBy { + order: RunQueryOrder; + orderBy: RunQueryOrderByField; +} + +// @public +export type RunQueryOrderByField = string; + +// @public +export type SalesforceLinkedService = LinkedService & { + type: "Salesforce"; + environmentUrl?: any; + username?: 
any; + password?: SecretBaseUnion; + securityToken?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SalesforceMarketingCloudLinkedService = LinkedService & { + type: "SalesforceMarketingCloud"; + clientId: any; + clientSecret?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type SalesforceMarketingCloudObjectDataset = Dataset & { + type: "SalesforceMarketingCloudObject"; + tableName?: any; +}; + +// @public +export type SalesforceMarketingCloudSource = TabularSource & { + type: "SalesforceMarketingCloudSource"; + query?: any; +}; + +// @public +export type SalesforceObjectDataset = Dataset & { + type: "SalesforceObject"; + objectApiName?: any; +}; + +// @public +export type SalesforceServiceCloudLinkedService = LinkedService & { + type: "SalesforceServiceCloud"; + environmentUrl?: any; + username?: any; + password?: SecretBaseUnion; + securityToken?: SecretBaseUnion; + extendedProperties?: any; + encryptedCredential?: any; +}; + +// @public +export type SalesforceServiceCloudObjectDataset = Dataset & { + type: "SalesforceServiceCloudObject"; + objectApiName?: any; +}; + +// @public +export type SalesforceServiceCloudSink = CopySink & { + type: "SalesforceServiceCloudSink"; + writeBehavior?: SalesforceSinkWriteBehavior; + externalIdFieldName?: any; + ignoreNullValues?: any; +}; + +// @public +export type SalesforceServiceCloudSource = CopySource & { + type: "SalesforceServiceCloudSource"; + query?: any; + readBehavior?: SalesforceSourceReadBehavior; +}; + +// @public +export type SalesforceSink = CopySink & { + type: "SalesforceSink"; + writeBehavior?: SalesforceSinkWriteBehavior; + externalIdFieldName?: any; + ignoreNullValues?: any; +}; + +// @public +export type SalesforceSinkWriteBehavior = string; + +// @public +export type SalesforceSource = TabularSource & { + type: "SalesforceSource"; + query?: any; + readBehavior?: SalesforceSourceReadBehavior; +}; + +// @public +export type SalesforceSourceReadBehavior = string; + +// @public +export type SapBwCubeDataset = Dataset & { + type: "SapBwCube"; +}; + +// @public +export type SapBWLinkedService = LinkedService & { + type: "SapBW"; + server: any; + systemNumber: any; + clientId: any; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SapBwSource = TabularSource & { + type: "SapBwSource"; + query?: any; +}; + +// @public +export type SapCloudForCustomerLinkedService = LinkedService & { + type: "SapCloudForCustomer"; + url: any; + username?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SapCloudForCustomerResourceDataset = Dataset & { + type: "SapCloudForCustomerResource"; + path: any; +}; + +// @public +export type SapCloudForCustomerSink = CopySink & { + type: "SapCloudForCustomerSink"; + writeBehavior?: SapCloudForCustomerSinkWriteBehavior; +}; + +// @public +export type SapCloudForCustomerSinkWriteBehavior = string; + +// @public +export type SapCloudForCustomerSource = TabularSource & { + type: "SapCloudForCustomerSource"; + query?: any; +}; + +// @public +export type SapEccLinkedService = LinkedService & { + type: "SapEcc"; + url: string; + username?: string; + password?: SecretBaseUnion; + encryptedCredential?: string; +}; + +// @public +export type SapEccResourceDataset = Dataset & { + type: "SapEccResource"; + path: any; +}; + +// @public +export type SapEccSource = TabularSource & { + 
type: "SapEccSource"; + query?: any; +}; + +// @public +export type SapHanaAuthenticationType = string; + +// @public +export type SapHanaLinkedService = LinkedService & { + type: "SapHana"; + connectionString?: any; + server: any; + authenticationType?: SapHanaAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SapHanaPartitionOption = string; + +// @public +export interface SapHanaPartitionSettings { + partitionColumnName?: any; +} + +// @public +export type SapHanaSource = TabularSource & { + type: "SapHanaSource"; + query?: any; + packetSize?: any; + partitionOption?: SapHanaPartitionOption; + partitionSettings?: SapHanaPartitionSettings; +}; + +// @public +export type SapHanaTableDataset = Dataset & { + type: "SapHanaTable"; + schemaTypePropertiesSchema?: any; + table?: any; +}; + +// @public +export type SapOpenHubLinkedService = LinkedService & { + type: "SapOpenHub"; + server: any; + systemNumber: any; + clientId: any; + language?: any; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SapOpenHubSource = TabularSource & { + type: "SapOpenHubSource"; + excludeLastRequest?: any; + baseRequestId?: any; +}; + +// @public +export type SapOpenHubTableDataset = Dataset & { + type: "SapOpenHubTable"; + openHubDestinationName: any; + excludeLastRequest?: any; + baseRequestId?: any; +}; + +// @public +export type SapTableLinkedService = LinkedService & { + type: "SapTable"; + server?: any; + systemNumber?: any; + clientId?: any; + language?: any; + systemId?: any; + userName?: any; + password?: SecretBaseUnion; + messageServer?: any; + messageServerService?: any; + sncMode?: any; + sncMyName?: any; + sncPartnerName?: any; + sncLibraryPath?: any; + sncQop?: any; + logonGroup?: any; + encryptedCredential?: any; +}; + +// @public +export type SapTablePartitionOption = string; + +// @public +export interface SapTablePartitionSettings { + maxPartitionsNumber?: any; + partitionColumnName?: any; + partitionLowerBound?: any; + partitionUpperBound?: any; +} + +// @public +export type SapTableResourceDataset = Dataset & { + type: "SapTableResource"; + tableName: any; +}; + +// @public +export type SapTableSource = TabularSource & { + type: "SapTableSource"; + rowCount?: any; + rowSkips?: any; + rfcTableFields?: any; + rfcTableOptions?: any; + batchSize?: any; + customRfcReadTableFunctionModule?: any; + partitionOption?: SapTablePartitionOption; + partitionSettings?: SapTablePartitionSettings; +}; + +// @public +export type SchedulerCurrentState = string; + +// @public +export type ScheduleTrigger = MultiplePipelineTrigger & { + type: "ScheduleTrigger"; + recurrence: ScheduleTriggerRecurrence; +}; + +// @public +export interface ScheduleTriggerRecurrence { + [property: string]: any; + endTime?: Date; + frequency?: RecurrenceFrequency; + interval?: number; + schedule?: RecurrenceSchedule; + startTime?: Date; + timeZone?: string; +} + +// @public +export interface ScriptAction { + name: string; + parameters?: string; + roles: HdiNodeTypes; + uri: string; +} + +// @public +export interface SecretBase { + type: "SecureString" | "AzureKeyVaultSecret"; +} + +// @public (undocumented) +export type SecretBaseUnion = SecretBase | SecureString | AzureKeyVaultSecretReference; + +// @public +export type SecureString = SecretBase & { + type: "SecureString"; + value: string; +}; + +// @public +export type SelfDependencyTumblingWindowTriggerReference = DependencyReference & { + type: 
"SelfDependencyTumblingWindowTriggerReference"; + offset: string; + size?: string; +}; + +// @public +export type SelfHostedIntegrationRuntime = IntegrationRuntime & { + type: "SelfHosted"; + linkedInfo?: LinkedIntegrationRuntimeTypeUnion; +}; + +// @public (undocumented) +export type SendOperationFn = (args: OperationArguments, spec: OperationSpec) => Promise; + +// @public +export type ServiceNowAuthenticationType = string; + +// @public +export type ServiceNowLinkedService = LinkedService & { + type: "ServiceNow"; + endpoint: any; + authenticationType: ServiceNowAuthenticationType; + username?: any; + password?: SecretBaseUnion; + clientId?: any; + clientSecret?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type ServiceNowObjectDataset = Dataset & { + type: "ServiceNowObject"; + tableName?: any; +}; + +// @public +export type ServiceNowSource = TabularSource & { + type: "ServiceNowSource"; + query?: any; +}; + +// @public +export type SetVariableActivity = Activity & { + type: "SetVariable"; + variableName?: string; + value?: any; +}; + +// @public +export type SftpAuthenticationType = string; + +// @public +export type SftpLocation = DatasetLocation & { + type: "SftpLocation"; +}; + +// @public +export type SftpReadSettings = StoreReadSettings & { + type: "SftpReadSettings"; + recursive?: any; + wildcardFolderPath?: any; + wildcardFileName?: any; + modifiedDatetimeStart?: any; + modifiedDatetimeEnd?: any; +}; + +// @public +export type SftpServerLinkedService = LinkedService & { + type: "Sftp"; + host: any; + port?: any; + authenticationType?: SftpAuthenticationType; + userName?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; + privateKeyPath?: any; + privateKeyContent?: SecretBaseUnion; + passPhrase?: SecretBaseUnion; + skipHostKeyValidation?: any; + hostKeyFingerprint?: any; +}; + +// @public +export type SftpWriteSettings = StoreWriteSettings & { + type: "SftpWriteSettings"; + operationTimeout?: any; +}; + +// @public +export type ShopifyLinkedService = LinkedService & { + type: "Shopify"; + host: any; + accessToken?: SecretBaseUnion; + useEncryptedEndpoints?: any; + useHostVerification?: any; + usePeerVerification?: any; + encryptedCredential?: any; +}; + +// @public +export type ShopifyObjectDataset = Dataset & { + type: "ShopifyObject"; + tableName?: any; +}; + +// @public +export type ShopifySource = TabularSource & { + type: "ShopifySource"; + query?: any; +}; + +// @public +export interface Sku { + capacity?: number; + name?: string; + tier?: string; +} + +// @public +export type SparkAuthenticationType = string; + +// @public (undocumented) +export interface SparkBatchJob { + appId?: string | null; + appInfo?: { + [propertyName: string]: string; + } | null; + artifactId?: string; + errors?: SparkServiceError[]; + id: number; + jobType?: SparkJobType; + // (undocumented) + livyInfo?: SparkBatchJobState; + logLines?: string[] | null; + name?: string; + plugin?: SparkServicePlugin; + result?: SparkBatchJobResultType; + scheduler?: SparkScheduler; + sparkPoolName?: string; + state?: string; + submitterId?: string; + submitterName?: string; + tags?: { + [propertyName: string]: string; + }; + workspaceName?: string; +} + +// @public +export type SparkBatchJobResultType = string; + +// @public (undocumented) +export interface SparkBatchJobState { + currentState?: string; + deadAt?: Date | null; + // (undocumented) + jobCreationRequest?: SparkRequest; + 
notStartedAt?: Date | null;
+    recoveringAt?: Date | null;
+    runningAt?: Date | null;
+    startingAt?: Date | null;
+    successAt?: Date | null;
+    terminatedAt?: Date | null;
+}
+
+// @public
+export type SparkErrorSource = string;
+
+// @public
+export interface SparkJobDefinition {
+    [property: string]: any;
+    description?: string;
+    jobProperties: SparkJobProperties;
+    language?: string;
+    requiredSparkVersion?: string;
+    targetBigDataPool: BigDataPoolReference;
+}
+
+// @public
+export interface SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams extends coreHttp.OperationOptions {
+    ifMatch?: string;
+}
+
+// @public
+export type SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SparkJobDefinitionResource;
+    };
+};
+
+// @public
+export type SparkJobDefinitionDebugSparkJobDefinitionResponse = SparkBatchJob & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SparkBatchJob;
+        [LROSYM]: LROResponseInfo;
+    };
+};
+
+// @public
+export type SparkJobDefinitionExecuteSparkJobDefinitionResponse = SparkBatchJob & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SparkBatchJob;
+        [LROSYM]: LROResponseInfo;
+    };
+};
+
+// @public
+export interface SparkJobDefinitionGetSparkJobDefinitionOptionalParams extends coreHttp.OperationOptions {
+    ifNoneMatch?: string;
+}
+
+// @public
+export type SparkJobDefinitionGetSparkJobDefinitionResponse = SparkJobDefinitionResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SparkJobDefinitionResource;
+    };
+};
+
+// @public
+export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SparkJobDefinitionsListResponse;
+    };
+};
+
+// @public
+export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SparkJobDefinitionsListResponse;
+    };
+};
+
+// @public
+export class SparkJobDefinitionOperation {
+    constructor(client: ArtifactsClient);
+    createOrUpdateSparkJobDefinition(sparkJobDefinitionName: string, sparkJobDefinition: SparkJobDefinitionResource, options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams): Promise<SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse>;
+    debugSparkJobDefinition(sparkJobDefinitionAzureResource: SparkJobDefinitionResource, options?: coreHttp.OperationOptions): Promise<LROPoller<SparkJobDefinitionDebugSparkJobDefinitionResponse>>;
+    deleteSparkJobDefinition(sparkJobDefinitionName: string, options?: coreHttp.OperationOptions): Promise<coreHttp.RestResponse>;
+    executeSparkJobDefinition(sparkJobDefinitionName: string, options?: coreHttp.OperationOptions): Promise<LROPoller<SparkJobDefinitionExecuteSparkJobDefinitionResponse>>;
+    getSparkJobDefinition(sparkJobDefinitionName: string, options?: SparkJobDefinitionGetSparkJobDefinitionOptionalParams): Promise<SparkJobDefinitionGetSparkJobDefinitionResponse>;
+    listSparkJobDefinitionsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator<SparkJobDefinitionResource>;
+    renameSparkJobDefinition(sparkJobDefinitionName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+}
+
+// @public
+export type SparkJobDefinitionResource = AzureEntityResource & {
+    properties: SparkJobDefinition;
+};
+
+// @public
+export interface SparkJobDefinitionsListResponse {
+    nextLink?: string;
+    value: SparkJobDefinitionResource[];
+}
+
+// @public
+export interface SparkJobProperties {
+    [property: string]: any;
+    archives?: string[];
+    args?: 
string[]; + className?: string; + conf?: any; + driverCores: number; + driverMemory: string; + executorCores: number; + executorMemory: string; + file: string; + files?: string[]; + jars?: string[]; + name?: string; + numExecutors: number; +} + +// @public +export type SparkJobReferenceType = string; + +// @public +export type SparkJobType = string; + +// @public +export type SparkLinkedService = LinkedService & { + type: "Spark"; + host: any; + port: any; + serverType?: SparkServerType; + thriftTransportProtocol?: SparkThriftTransportProtocol; + authenticationType: SparkAuthenticationType; + username?: any; + password?: SecretBaseUnion; + httpPath?: any; + enableSsl?: any; + trustedCertPath?: any; + useSystemTrustStore?: any; + allowHostNameCNMismatch?: any; + allowSelfSignedServerCert?: any; + encryptedCredential?: any; +}; + +// @public +export type SparkObjectDataset = Dataset & { + type: "SparkObject"; + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public (undocumented) +export interface SparkRequest { + // (undocumented) + archives?: string[]; + // (undocumented) + arguments?: string[]; + // (undocumented) + className?: string; + configuration?: { + [propertyName: string]: string; + }; + // (undocumented) + driverCores?: number; + // (undocumented) + driverMemory?: string; + // (undocumented) + executorCores?: number; + // (undocumented) + executorCount?: number; + // (undocumented) + executorMemory?: string; + // (undocumented) + file?: string; + // (undocumented) + files?: string[]; + // (undocumented) + jars?: string[]; + // (undocumented) + name?: string; + // (undocumented) + pythonFiles?: string[]; +} + +// @public (undocumented) +export interface SparkScheduler { + // (undocumented) + cancellationRequestedAt?: Date; + // (undocumented) + currentState?: SchedulerCurrentState; + // (undocumented) + endedAt?: Date | null; + // (undocumented) + scheduledAt?: Date | null; + // (undocumented) + submittedAt?: Date | null; +} + +// @public +export type SparkServerType = string; + +// @public (undocumented) +export interface SparkServiceError { + // (undocumented) + errorCode?: string; + // (undocumented) + message?: string; + // (undocumented) + source?: SparkErrorSource; +} + +// @public (undocumented) +export interface SparkServicePlugin { + // (undocumented) + cleanupStartedAt?: Date | null; + // (undocumented) + currentState?: PluginCurrentState; + // (undocumented) + monitoringStartedAt?: Date | null; + // (undocumented) + preparationStartedAt?: Date | null; + // (undocumented) + resourceAcquisitionStartedAt?: Date | null; + // (undocumented) + submissionStartedAt?: Date | null; +} + +// @public +export type SparkSource = TabularSource & { + type: "SparkSource"; + query?: any; +}; + +// @public +export type SparkThriftTransportProtocol = string; + +// @public +export interface SqlConnection { + [property: string]: any; + name: string; + type: SqlConnectionType; +} + +// @public +export type SqlConnectionType = string; + +// @public +export type SqlDWSink = CopySink & { + type: "SqlDWSink"; + preCopyScript?: any; + allowPolyBase?: any; + polyBaseSettings?: PolybaseSettings; + allowCopyCommand?: any; + copyCommandSettings?: DWCopyCommandSettings; + tableOption?: any; +}; + +// @public +export type SqlDWSource = TabularSource & { + type: "SqlDWSource"; + sqlReaderQuery?: any; + sqlReaderStoredProcedureName?: any; + storedProcedureParameters?: any; +}; + +// @public +export type SqlMISink = CopySink & { + type: "SqlMISink"; + 
sqlWriterStoredProcedureName?: any;
+    sqlWriterTableType?: any;
+    preCopyScript?: any;
+    storedProcedureParameters?: {
+        [propertyName: string]: StoredProcedureParameter;
+    };
+    storedProcedureTableTypeParameterName?: any;
+    tableOption?: any;
+};
+
+// @public
+export type SqlMISource = TabularSource & {
+    type: "SqlMISource";
+    sqlReaderQuery?: any;
+    sqlReaderStoredProcedureName?: any;
+    storedProcedureParameters?: {
+        [propertyName: string]: StoredProcedureParameter;
+    };
+    produceAdditionalTypes?: any;
+};
+
+// @public
+export type SqlPool = TrackedResource & {
+    sku?: Sku;
+    maxSizeBytes?: number;
+    collation?: string;
+    sourceDatabaseId?: string;
+    recoverableDatabaseId?: string;
+    provisioningState?: string;
+    status?: string;
+    restorePointInTime?: string;
+    createMode?: string;
+    creationDate?: Date;
+};
+
+// @public
+export interface SqlPoolInfoListResult {
+    nextLink?: string;
+    value?: SqlPool[];
+}
+
+// @public
+export interface SqlPoolReference {
+    referenceName: string;
+    type: SqlPoolReferenceType;
+}
+
+// @public
+export type SqlPoolReferenceType = string;
+
+// @public
+export type SqlPoolsGetResponse = SqlPool & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SqlPool;
+    };
+};
+
+// @public
+export type SqlPoolsListResponse = SqlPoolInfoListResult & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SqlPoolInfoListResult;
+    };
+};
+
+// @public
+export class SqlPoolsOperation {
+    constructor(client: ArtifactsClient);
+    get(sqlPoolName: string, options?: coreHttp.OperationOptions): Promise<SqlPoolsGetResponse>;
+    list(options?: coreHttp.OperationOptions): Promise<SqlPoolsListResponse>;
+}
+
+// @public
+export type SqlPoolStoredProcedureActivity = Activity & {
+    type: "SqlPoolStoredProcedure";
+    sqlPool: SqlPoolReference;
+    storedProcedureName: any;
+    storedProcedureParameters?: {
+        [propertyName: string]: StoredProcedureParameter;
+    };
+};
+
+// @public
+export interface SqlScript {
+    [property: string]: any;
+    content: SqlScriptContent;
+    description?: string;
+    type?: SqlScriptType;
+}
+
+// @public
+export interface SqlScriptContent {
+    [property: string]: any;
+    currentConnection: SqlConnection;
+    metadata?: SqlScriptMetadata;
+    query: string;
+}
+
+// @public
+export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions {
+    ifMatch?: string;
+}
+
+// @public
+export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SqlScriptResource;
+    };
+};
+
+// @public
+export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions {
+    ifNoneMatch?: string;
+}
+
+// @public
+export type SqlScriptGetSqlScriptResponse = SqlScriptResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SqlScriptResource;
+    };
+};
+
+// @public
+export type SqlScriptGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SqlScriptsListResponse;
+    };
+};
+
+// @public
+export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: SqlScriptsListResponse;
+    };
+};
+
+// @public
+export interface SqlScriptMetadata {
+    [property: string]: any;
+    language?: string;
+}
+
+// @public
+export class SqlScriptOperation {
+    constructor(client: ArtifactsClient);
+    createOrUpdateSqlScript(sqlScriptName: string, sqlScript: SqlScriptResource, options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams): Promise<SqlScriptCreateOrUpdateSqlScriptResponse>;
+    deleteSqlScript(sqlScriptName: string, options?: coreHttp.OperationOptions): Promise<coreHttp.RestResponse>;
+    getSqlScript(sqlScriptName: string, options?: SqlScriptGetSqlScriptOptionalParams): Promise<SqlScriptGetSqlScriptResponse>;
+    listSqlScriptsByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator<SqlScriptResource>;
+    renameSqlScript(sqlScriptName: string, request: ArtifactRenameRequest, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+}
+
+// @public
+export interface SqlScriptResource {
+    readonly etag?: string;
+    readonly id?: string;
+    name: string;
+    properties: SqlScript;
+    readonly type?: string;
+}
+
+// @public
+export interface SqlScriptsListResponse {
+    nextLink?: string;
+    value: SqlScriptResource[];
+}
+
+// @public
+export type SqlScriptType = string;
+
+// @public
+export type SqlServerLinkedService = LinkedService & {
+    type: "SqlServer";
+    connectionString: any;
+    userName?: any;
+    password?: SecretBaseUnion;
+    encryptedCredential?: any;
+};
+
+// @public
+export type SqlServerSink = CopySink & {
+    type: "SqlServerSink";
+    sqlWriterStoredProcedureName?: any;
+    sqlWriterTableType?: any;
+    preCopyScript?: any;
+    storedProcedureParameters?: {
+        [propertyName: string]: StoredProcedureParameter;
+    };
+    storedProcedureTableTypeParameterName?: any;
+    tableOption?: any;
+};
+
+// @public
+export type SqlServerSource = TabularSource & {
+    type: "SqlServerSource";
+    sqlReaderQuery?: any;
+    sqlReaderStoredProcedureName?: any;
+    storedProcedureParameters?: {
+        [propertyName: string]: StoredProcedureParameter;
+    };
+    produceAdditionalTypes?: any;
+};
+
+// @public
+export type SqlServerStoredProcedureActivity = ExecutionActivity & {
+    type: "SqlServerStoredProcedure";
+    storedProcedureName: any;
+    storedProcedureParameters?: {
+        [propertyName: string]: StoredProcedureParameter;
+    };
+};
+
+// @public
+export type SqlServerTableDataset = Dataset & {
+    type: "SqlServerTable";
+    tableName?: any;
+    schemaTypePropertiesSchema?: any;
+    table?: any;
+};
+
+// @public
+export type SqlSink = CopySink & {
+    type: "SqlSink";
+    sqlWriterStoredProcedureName?: any;
+    sqlWriterTableType?: any;
+    preCopyScript?: any;
+    storedProcedureParameters?: {
+        [propertyName: string]: StoredProcedureParameter;
+    };
+    storedProcedureTableTypeParameterName?: any;
+    tableOption?: any;
+};
+
+// @public
+export type SqlSource = TabularSource & {
+    type: "SqlSource";
+    sqlReaderQuery?: any;
+    sqlReaderStoredProcedureName?: any;
+    storedProcedureParameters?: {
+        [propertyName: string]: StoredProcedureParameter;
+    };
+};
+
+// @public
+export type SquareLinkedService = LinkedService & {
+    type: "Square";
+    host: any;
+    clientId: any;
+    clientSecret?: SecretBaseUnion;
+    redirectUri: any;
+    useEncryptedEndpoints?: any;
+    useHostVerification?: any;
+    usePeerVerification?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type SquareObjectDataset = Dataset & {
+    type: "SquareObject";
+    tableName?: any;
+};
+
+// @public
+export type SquareSource = TabularSource & {
+    type: "SquareSource";
+    query?: any;
+};
+
+// @public
+export interface SsisAccessCredential {
+    domain: any;
+    password: SecretBaseUnion;
+    userName: any;
+}
+
+// @public
+export interface SsisChildPackage {
+    packageContent: any;
+    packageLastModifiedDate?: string;
+    packageName?: string;
+    packagePath: any;
+}
+
+// @public
+export interface SsisExecutionCredential {
+    domain: any;
+    password: SecureString;
+    userName: any;
+}
+
+// @public
+export interface SsisExecutionParameter {
+    value: any;
+}
+ +// @public +export interface SsisLogLocation { + accessCredential?: SsisAccessCredential; + logPath: any; + logRefreshInterval?: any; + type: SsisLogLocationType; +} + +// @public +export type SsisLogLocationType = string; + +// @public +export interface SsisObjectMetadataStatusResponse { + error?: string; + name?: string; + properties?: string; + status?: string; +} + +// @public +export interface SsisPackageLocation { + accessCredential?: SsisAccessCredential; + childPackages?: SsisChildPackage[]; + configurationPath?: any; + packageContent?: any; + packageLastModifiedDate?: string; + packageName?: string; + packagePassword?: SecretBaseUnion; + packagePath?: any; + type?: SsisPackageLocationType; +} + +// @public +export type SsisPackageLocationType = string; + +// @public +export interface SsisPropertyOverride { + isSensitive?: boolean; + value: any; +} + +// @public +export interface StagingSettings { + [property: string]: any; + enableCompression?: any; + linkedServiceName: LinkedServiceReference; + path?: any; +} + +// @public +export interface StartDataFlowDebugSessionRequest { + dataFlow?: DataFlowResource; + datasets?: DatasetResource[]; + debugSettings?: any; + incrementalDebug?: boolean; + linkedServices?: LinkedServiceResource[]; + sessionId?: string; + staging?: any; +} + +// @public +export interface StartDataFlowDebugSessionResponse { + jobVersion?: string; +} + +// @public +export interface StoredProcedureParameter { + type?: StoredProcedureParameterType; + value?: any; +} + +// @public +export type StoredProcedureParameterType = string; + +// @public +export interface StoreReadSettings { + [property: string]: any; + maxConcurrentConnections?: any; + type: "AzureBlobStorageReadSettings" | "AzureBlobFSReadSettings" | "AzureDataLakeStoreReadSettings" | "AmazonS3ReadSettings" | "FileServerReadSettings" | "AzureFileStorageReadSettings" | "GoogleCloudStorageReadSettings" | "FtpReadSettings" | "SftpReadSettings" | "HttpReadSettings" | "HdfsReadSettings"; +} + +// @public (undocumented) +export type StoreReadSettingsUnion = StoreReadSettings | AzureBlobStorageReadSettings | AzureBlobFSReadSettings | AzureDataLakeStoreReadSettings | AmazonS3ReadSettings | FileServerReadSettings | AzureFileStorageReadSettings | GoogleCloudStorageReadSettings | FtpReadSettings | SftpReadSettings | HttpReadSettings | HdfsReadSettings; + +// @public +export interface StoreWriteSettings { + [property: string]: any; + copyBehavior?: any; + maxConcurrentConnections?: any; + type: "SftpWriteSettings" | "AzureBlobStorageWriteSettings" | "AzureBlobFSWriteSettings" | "AzureDataLakeStoreWriteSettings" | "FileServerWriteSettings"; +} + +// @public (undocumented) +export type StoreWriteSettingsUnion = StoreWriteSettings | SftpWriteSettings | AzureBlobStorageWriteSettings | AzureBlobFSWriteSettings | AzureDataLakeStoreWriteSettings | FileServerWriteSettings; + +// @public +export type SubResource = AzureEntityResource & {}; + +// @public +export interface SubResourceDebugResource { + name?: string; +} + +// @public +export type SwitchActivity = Activity & { + type: "Switch"; + on: Expression; + cases?: SwitchCase[]; + defaultActivities?: ActivityUnion[]; +}; + +// @public +export interface SwitchCase { + activities?: ActivityUnion[]; + value?: string; +} + +// @public +export type SybaseAuthenticationType = string; + +// @public +export type SybaseLinkedService = LinkedService & { + type: "Sybase"; + server: any; + database: any; + schema?: any; + authenticationType?: SybaseAuthenticationType; + username?: 
any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type SybaseSource = TabularSource & { + type: "SybaseSource"; + query?: any; +}; + +// @public +export type SybaseTableDataset = Dataset & { + type: "SybaseTable"; + tableName?: any; +}; + +// @public +export type SynapseNotebookActivity = ExecutionActivity & { + type: "SynapseNotebook"; + notebook: SynapseNotebookReference; + parameters?: { + [propertyName: string]: any; + }; +}; + +// @public +export interface SynapseNotebookReference { + referenceName: string; + type: NotebookReferenceType; +} + +// @public +export type SynapseSparkJobDefinitionActivity = ExecutionActivity & { + type: "SparkJob"; + sparkJob: SynapseSparkJobReference; +}; + +// @public +export interface SynapseSparkJobReference { + referenceName: string; + type: SparkJobReferenceType; +} + +// @public +export type TabularSource = CopySource & { + type: "TabularSource"; + queryTimeout?: any; +}; + +// @public (undocumented) +export type TabularSourceUnion = TabularSource | AzureTableSource | InformixSource | Db2Source | OdbcSource | MySqlSource | PostgreSqlSource | SybaseSource | SapBwSource | SalesforceSource | SapCloudForCustomerSource | SapEccSource | SapHanaSource | SapOpenHubSource | SapTableSource | SqlSource | SqlServerSource | AzureSqlSource | SqlMISource | SqlDWSource | AzureMySqlSource | TeradataSource | CassandraSource | AmazonMWSSource | AzurePostgreSqlSource | ConcurSource | CouchbaseSource | DrillSource | EloquaSource | GoogleBigQuerySource | GreenplumSource | HBaseSource | HiveSource | HubspotSource | ImpalaSource | JiraSource | MagentoSource | MariaDBSource | AzureMariaDBSource | MarketoSource | PaypalSource | PhoenixSource | PrestoSource | QuickBooksSource | ServiceNowSource | ShopifySource | SparkSource | SquareSource | XeroSource | ZohoSource | NetezzaSource | VerticaSource | SalesforceMarketingCloudSource | ResponsysSource | DynamicsAXSource | OracleServiceCloudSource | GoogleAdWordsSource | AmazonRedshiftSource; + +// @public +export type TabularTranslator = CopyTranslator & { + type: "TabularTranslator"; + columnMappings?: any; + schemaMapping?: any; + collectionReference?: any; + mapComplexValuesToString?: any; + mappings?: any; +}; + +// @public +export type TeradataAuthenticationType = string; + +// @public +export type TeradataLinkedService = LinkedService & { + type: "Teradata"; + connectionString?: any; + server?: any; + authenticationType?: TeradataAuthenticationType; + username?: any; + password?: SecretBaseUnion; + encryptedCredential?: any; +}; + +// @public +export type TeradataPartitionOption = string; + +// @public +export interface TeradataPartitionSettings { + partitionColumnName?: any; + partitionLowerBound?: any; + partitionUpperBound?: any; +} + +// @public +export type TeradataSource = TabularSource & { + type: "TeradataSource"; + query?: any; + partitionOption?: TeradataPartitionOption; + partitionSettings?: TeradataPartitionSettings; +}; + +// @public +export type TeradataTableDataset = Dataset & { + type: "TeradataTable"; + database?: any; + table?: any; +}; + +// @public +export type TextFormat = DatasetStorageFormat & { + type: "TextFormat"; + columnDelimiter?: any; + rowDelimiter?: any; + escapeChar?: any; + quoteChar?: any; + nullValue?: any; + encodingName?: any; + treatEmptyAsNull?: any; + skipLineCount?: any; + firstRowAsHeader?: any; +}; + +// @public +export type TrackedResource = Resource & { + tags?: { + [propertyName: string]: string; + }; + location: string; +}; + +// @public 
+export interface Transformation {
+    description?: string;
+    name: string;
+}
+
+// @public
+export interface Trigger {
+    [property: string]: any;
+    annotations?: any[];
+    description?: string;
+    readonly runtimeState?: TriggerRuntimeState;
+    type: "RerunTumblingWindowTrigger" | "MultiplePipelineTrigger" | "ScheduleTrigger" | "BlobTrigger" | "BlobEventsTrigger" | "TumblingWindowTrigger" | "ChainingTrigger";
+}
+
+// @public
+export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions {
+    ifMatch?: string;
+}
+
+// @public
+export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: TriggerResource;
+        [LROSYM]: LROResponseInfo;
+    };
+};
+
+// @public
+export interface TriggerDependencyProvisioningStatus {
+    provisioningStatus: string;
+    triggerName: string;
+}
+
+// @public
+export type TriggerDependencyReference = DependencyReference & {
+    type: "TriggerDependencyReference";
+    referenceTrigger: TriggerReference;
+};
+
+// @public (undocumented)
+export type TriggerDependencyReferenceUnion = TriggerDependencyReference | TumblingWindowTriggerDependencyReference;
+
+// @public
+export type TriggerGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: TriggerSubscriptionOperationStatus;
+    };
+};
+
+// @public
+export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions {
+    ifNoneMatch?: string;
+}
+
+// @public
+export type TriggerGetTriggerResponse = TriggerResource & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: TriggerResource;
+    };
+};
+
+// @public
+export type TriggerGetTriggersByWorkspaceNextResponse = TriggerListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: TriggerListResponse;
+    };
+};
+
+// @public
+export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: TriggerListResponse;
+    };
+};
+
+// @public
+export interface TriggerListResponse {
+    nextLink?: string;
+    value: TriggerResource[];
+}
+
+// @public
+export class TriggerOperation {
+    constructor(client: ArtifactsClient);
+    createOrUpdateTrigger(triggerName: string, trigger: TriggerResource, options?: TriggerCreateOrUpdateTriggerOptionalParams): Promise<LROPoller<TriggerCreateOrUpdateTriggerResponse>>;
+    deleteTrigger(triggerName: string, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+    getEventSubscriptionStatus(triggerName: string, options?: coreHttp.OperationOptions): Promise<TriggerGetEventSubscriptionStatusResponse>;
+    getTrigger(triggerName: string, options?: TriggerGetTriggerOptionalParams): Promise<TriggerGetTriggerResponse>;
+    listTriggersByWorkspace(options?: coreHttp.OperationOptions): PagedAsyncIterableIterator<TriggerResource>;
+    startTrigger(triggerName: string, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+    stopTrigger(triggerName: string, options?: coreHttp.OperationOptions): Promise<LROPoller<coreHttp.RestResponse>>;
+    subscribeTriggerToEvents(triggerName: string, options?: coreHttp.OperationOptions): Promise<LROPoller<TriggerSubscribeTriggerToEventsResponse>>;
+    unsubscribeTriggerFromEvents(triggerName: string, options?: coreHttp.OperationOptions): Promise<LROPoller<TriggerUnsubscribeTriggerFromEventsResponse>>;
+}
+
+// @public
+export interface TriggerPipelineReference {
+    parameters?: {
+        [propertyName: string]: any;
+    };
+    pipelineReference?: PipelineReference;
+}
+
+// @public
+export interface TriggerReference {
+    referenceName: string;
+    type: TriggerReferenceType;
+}
+
+// @public
+export type TriggerReferenceType = string;
+
+// @public
+export type TriggerResource = AzureEntityResource & {
+    properties: TriggerUnion;
+};
+
+// @public
+export interface TriggerRun {
+    [property: string]: any;
+    readonly message?: string;
+    readonly properties?: {
+        [propertyName: string]: string;
+    };
+    readonly status?: TriggerRunStatus;
+    readonly triggeredPipelines?: {
+        [propertyName: string]: string;
+    };
+    readonly triggerName?: string;
+    readonly triggerRunId?: string;
+    readonly triggerRunTimestamp?: Date;
+    readonly triggerType?: string;
+}
+
+// @public
+export class TriggerRunOperation {
+    constructor(client: ArtifactsClient);
+    cancelTriggerInstance(triggerName: string, runId: string, options?: coreHttp.OperationOptions): Promise<coreHttp.RestResponse>;
+    queryTriggerRunsByWorkspace(filterParameters: RunFilterParameters, options?: coreHttp.OperationOptions): Promise<TriggerRunQueryTriggerRunsByWorkspaceResponse>;
+    rerunTriggerInstance(triggerName: string, runId: string, options?: coreHttp.OperationOptions): Promise<coreHttp.RestResponse>;
+}
+
+// @public
+export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: TriggerRunsQueryResponse;
+    };
+};
+
+// @public
+export interface TriggerRunsQueryResponse {
+    continuationToken?: string;
+    value: TriggerRun[];
+}
+
+// @public
+export type TriggerRunStatus = string;
+
+// @public
+export type TriggerRuntimeState = string;
+
+// @public
+export type TriggerSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: TriggerSubscriptionOperationStatus;
+        [LROSYM]: LROResponseInfo;
+    };
+};
+
+// @public
+export interface TriggerSubscriptionOperationStatus {
+    readonly status?: EventSubscriptionStatus;
+    readonly triggerName?: string;
+}
+
+// @public (undocumented)
+export type TriggerUnion = Trigger | RerunTumblingWindowTrigger | MultiplePipelineTriggerUnion | TumblingWindowTrigger | ChainingTrigger;
+
+// @public
+export type TriggerUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: TriggerSubscriptionOperationStatus;
+        [LROSYM]: LROResponseInfo;
+    };
+};
+
+// @public
+export type TumblingWindowFrequency = string;
+
+// @public
+export type TumblingWindowTrigger = Trigger & {
+    type: "TumblingWindowTrigger";
+    pipeline: TriggerPipelineReference;
+    frequency: TumblingWindowFrequency;
+    interval: number;
+    startTime: Date;
+    endTime?: Date;
+    delay?: any;
+    maxConcurrency: number;
+    retryPolicy?: RetryPolicy;
+    dependsOn?: DependencyReferenceUnion[];
+};
+
+// @public
+export type TumblingWindowTriggerDependencyReference = TriggerDependencyReference & {
+    type: "TumblingWindowTriggerDependencyReference";
+    offset?: string;
+    size?: string;
+};
+
+// @public
+export type Type = string;
+
+// @public
+export type UntilActivity = Activity & {
+    type: "Until";
+    expression: Expression;
+    timeout?: any;
+    activities: ActivityUnion[];
+};
+
+// @public
+export interface UserProperty {
+    name: string;
+    value: any;
+}
+
+// @public
+export type ValidationActivity = Activity & {
+    type: "Validation";
+    timeout?: any;
+    sleep?: any;
+    minimumSize?: any;
+    childItems?: any;
+    dataset: DatasetReference;
+};
+
+// @public
+export interface VariableSpecification {
+    defaultValue?: any;
+    type: VariableType;
+}
+
+// @public
+export type VariableType = string;
+
+// @public
+export type VerticaLinkedService = LinkedService & {
+    type: "Vertica";
+    connectionString?: any;
+    pwd?: AzureKeyVaultSecretReference;
+    
encryptedCredential?: any; +}; + +// @public +export type VerticaSource = TabularSource & { + type: "VerticaSource"; + query?: any; +}; + +// @public +export type VerticaTableDataset = Dataset & { + type: "VerticaTable"; + tableName?: any; + table?: any; + schemaTypePropertiesSchema?: any; +}; + +// @public +export interface VirtualNetworkProfile { + computeSubnetId?: string; +} + +// @public +export type WaitActivity = Activity & { + type: "Wait"; + waitTimeInSeconds: number; +}; + +// @public +export type WebActivity = ExecutionActivity & { + type: "WebActivity"; + method: WebActivityMethod; + url: any; + headers?: any; + body?: any; + authentication?: WebActivityAuthentication; + datasets?: DatasetReference[]; + linkedServices?: LinkedServiceReference[]; + connectVia?: IntegrationRuntimeReference; +}; + +// @public +export interface WebActivityAuthentication { + password?: SecretBaseUnion; + pfx?: SecretBaseUnion; + resource?: string; + type: string; + username?: string; +} + +// @public +export type WebActivityMethod = string; + +// @public +export type WebAnonymousAuthentication = WebLinkedServiceTypeProperties & { + authenticationType: "Anonymous"; +}; + +// @public +export type WebAuthenticationType = string; + +// @public +export type WebBasicAuthentication = WebLinkedServiceTypeProperties & { + authenticationType: "Basic"; + username: any; + password: SecretBaseUnion; +}; + +// @public +export type WebClientCertificateAuthentication = WebLinkedServiceTypeProperties & { + authenticationType: "ClientCertificate"; + pfx: SecretBaseUnion; + password: SecretBaseUnion; +}; + +// @public +export type WebHookActivity = Activity & { + type: "WebHook"; + method: WebHookActivityMethod; + url: any; + timeout?: string; + headers?: any; + body?: any; + authentication?: WebActivityAuthentication; + reportStatusOnCallBack?: any; +}; + +// @public +export type WebHookActivityMethod = string; + +// @public +export type WebLinkedService = LinkedService & { + type: "Web"; + typeProperties: WebLinkedServiceTypePropertiesUnion; +}; + +// @public +export interface WebLinkedServiceTypeProperties { + authenticationType: "Anonymous" | "Basic" | "ClientCertificate"; + url: any; +} + +// @public (undocumented) +export type WebLinkedServiceTypePropertiesUnion = WebLinkedServiceTypeProperties | WebAnonymousAuthentication | WebBasicAuthentication | WebClientCertificateAuthentication; + +// @public +export type WebSource = CopySource & { + type: "WebSource"; +}; + +// @public +export type WebTableDataset = Dataset & { + type: "WebTable"; + index: any; + path?: any; +}; + +// @public +export type Workspace = TrackedResource & { + identity?: ManagedIdentity; + defaultDataLakeStorage?: DataLakeStorageAccountDetails; + sqlAdministratorLoginPassword?: string; + managedResourceGroupName?: string; + readonly provisioningState?: string; + sqlAdministratorLogin?: string; + virtualNetworkProfile?: VirtualNetworkProfile; + connectivityEndpoints?: { + [propertyName: string]: string; + }; + managedVirtualNetwork?: string; + privateEndpointConnections?: PrivateEndpointConnection[]; + encryption?: EncryptionDetails; + readonly workspaceUID?: string; + readonly extraProperties?: { + [propertyName: string]: any; + }; + managedVirtualNetworkSettings?: ManagedVirtualNetworkSettings; + workspaceRepositoryConfiguration?: WorkspaceRepositoryConfiguration; + purviewConfiguration?: PurviewConfiguration; +}; + +// @public +export type WorkspaceGetResponse = Workspace & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + 
parsedBody: Workspace;
+    };
+};
+
+// @public
+export interface WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams extends coreHttp.OperationOptions {
+    clientRequestId?: string;
+}
+
+// @public
+export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAccessTokenResponse & {
+    _response: coreHttp.HttpResponse & {
+        bodyAsText: string;
+        parsedBody: GitHubAccessTokenResponse;
+    };
+};
+
+// @public
+export class WorkspaceGitRepoManagementOperation {
+    constructor(client: ArtifactsClient);
+    getGitHubAccessToken(gitHubAccessTokenRequest: GitHubAccessTokenRequest, options?: WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams): Promise<WorkspaceGitRepoManagementGetGitHubAccessTokenResponse>;
+}
+
+// @public
+export interface WorkspaceIdentity {
+    readonly principalId?: string;
+    readonly tenantId?: string;
+    type: "SystemAssigned";
+}
+
+// @public
+export interface WorkspaceKeyDetails {
+    keyVaultUrl?: string;
+    name?: string;
+}
+
+// @public
+export class WorkspaceOperation {
+    constructor(client: ArtifactsClient);
+    get(options?: coreHttp.OperationOptions): Promise<WorkspaceGetResponse>;
+}
+
+// @public
+export interface WorkspaceRepositoryConfiguration {
+    accountName?: string;
+    collaborationBranch?: string;
+    hostName?: string;
+    projectName?: string;
+    repositoryName?: string;
+    rootFolder?: string;
+    type?: string;
+}
+
+// @public
+export interface WorkspaceUpdateParameters {
+    identity?: WorkspaceIdentity;
+    tags?: {
+        [propertyName: string]: string;
+    };
+}
+
+// @public
+export type XeroLinkedService = LinkedService & {
+    type: "Xero";
+    host: any;
+    consumerKey?: SecretBaseUnion;
+    privateKey?: SecretBaseUnion;
+    useEncryptedEndpoints?: any;
+    useHostVerification?: any;
+    usePeerVerification?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type XeroObjectDataset = Dataset & {
+    type: "XeroObject";
+    tableName?: any;
+};
+
+// @public
+export type XeroSource = TabularSource & {
+    type: "XeroSource";
+    query?: any;
+};
+
+// @public
+export type ZohoLinkedService = LinkedService & {
+    type: "Zoho";
+    endpoint: any;
+    accessToken?: SecretBaseUnion;
+    useEncryptedEndpoints?: any;
+    useHostVerification?: any;
+    usePeerVerification?: any;
+    encryptedCredential?: any;
+};
+
+// @public
+export type ZohoObjectDataset = Dataset & {
+    type: "ZohoObject";
+    tableName?: any;
+};
+
+// @public
+export type ZohoSource = TabularSource & {
+    type: "ZohoSource";
+    query?: any;
+};
+
+
+// (No @packageDocumentation comment for this package)
+
+```
diff --git a/sdk/synapse/synapse-artifacts/rollup.config.js b/sdk/synapse/synapse-artifacts/rollup.config.js
new file mode 100644
index 000000000000..cf8b131d1c51
--- /dev/null
+++ b/sdk/synapse/synapse-artifacts/rollup.config.js
@@ -0,0 +1,41 @@
+import rollup from "rollup";
+import nodeResolve from "rollup-plugin-node-resolve";
+import sourcemaps from "rollup-plugin-sourcemaps";
+import cjs from "@rollup/plugin-commonjs";
+
+/**
+ * @type {rollup.RollupFileOptions}
+ */
+const config = {
+  input: "./dist-esm/artifactsClient.js",
+  external: ["@azure/core-http", "@azure/core-arm"],
+  output: {
+    file: "./dist/index.js",
+    format: "cjs",
+    name: "Azure.SynapseArtifacts",
+    sourcemap: true,
+    globals: {
+      "@azure/core-http": "coreHttp",
+      "@azure/core-arm": "coreArm"
+    },
+    banner: `/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ ` + }, + plugins: [ + nodeResolve({ module: true }), + sourcemaps(), + cjs({ + namedExports: { + assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], + "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"] + } + }) + ] +}; + +export default config; diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClient.ts b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts new file mode 100644 index 000000000000..6db744d7a122 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/artifactsClient.ts @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; +import { + LinkedService, + Dataset, + Pipeline, + PipelineRun, + Trigger, + TriggerRun, + DataFlow, + DataFlowDebugSession, + SqlScript, + SparkJobDefinition, + Notebook, + Workspace, + SqlPools, + BigDataPools, + IntegrationRuntimes, + WorkspaceGitRepoManagement +} from "./operations"; +import { ArtifactsClientContext } from "./artifactsClientContext"; +import { ArtifactsClientOptionalParams } from "./models"; + +export class ArtifactsClient extends ArtifactsClientContext { + /** + * Initializes a new instance of the ArtifactsClient class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: ArtifactsClientOptionalParams + ) { + super(credentials, endpoint, options); + this.linkedService = new LinkedService(this); + this.dataset = new Dataset(this); + this.pipeline = new Pipeline(this); + this.pipelineRun = new PipelineRun(this); + this.trigger = new Trigger(this); + this.triggerRun = new TriggerRun(this); + this.dataFlow = new DataFlow(this); + this.dataFlowDebugSession = new DataFlowDebugSession(this); + this.sqlScript = new SqlScript(this); + this.sparkJobDefinition = new SparkJobDefinition(this); + this.notebook = new Notebook(this); + this.workspace = new Workspace(this); + this.sqlPools = new SqlPools(this); + this.bigDataPools = new BigDataPools(this); + this.integrationRuntimes = new IntegrationRuntimes(this); + this.workspaceGitRepoManagement = new WorkspaceGitRepoManagement(this); + } + + linkedService: LinkedService; + dataset: Dataset; + pipeline: Pipeline; + pipelineRun: PipelineRun; + trigger: Trigger; + triggerRun: TriggerRun; + dataFlow: DataFlow; + dataFlowDebugSession: DataFlowDebugSession; + sqlScript: SqlScript; + sparkJobDefinition: SparkJobDefinition; + notebook: Notebook; + workspace: Workspace; + sqlPools: SqlPools; + bigDataPools: BigDataPools; + integrationRuntimes: IntegrationRuntimes; + workspaceGitRepoManagement: WorkspaceGitRepoManagement; +} diff --git a/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts new file mode 100644 index 000000000000..d5df64d397dc --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/artifactsClientContext.ts @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
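+// Reviewer note: a minimal usage sketch for the generated client above (hedged; the credential +// comes from "@azure/identity", which is an assumption of this sketch, and the workspace URL is illustrative): +//   import { DefaultAzureCredential } from "@azure/identity"; +//   import { ArtifactsClient } from "@azure/synapse-artifacts"; +//   const client = new ArtifactsClient(new DefaultAzureCredential(), "https://myworkspace.dev.azuresynapse.net"); +//   const workspace = await client.workspace.get(); // WorkspaceOperation.get, per the API surface above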
+import * as coreHttp from "@azure/core-http"; +import { ArtifactsClientOptionalParams } from "./models"; +import { lroPolicy } from "./lro"; + +const packageName = "@azure/synapse-artifacts"; +const packageVersion = "1.0.0"; + +export class ArtifactsClientContext extends coreHttp.ServiceClient { + endpoint: string; + apiVersion: string; + + /** + * Initializes a new instance of the ArtifactsClientContext class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: ArtifactsClientOptionalParams + ) { + if (credentials === undefined) { + throw new Error("'credentials' cannot be null"); + } + if (endpoint === undefined) { + throw new Error("'endpoint' cannot be null"); + } + + // Initializing default values for options + if (!options) { + options = {}; + } + + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + + if (!options.credentialScopes) { + options.credentialScopes = ["https://dev.azuresynapse.net/.default"]; + } + + // Building the request policy factories based on the passed factories and + // any required factories needed by the client. + if (Array.isArray(options.requestPolicyFactories)) { + // When an array of factories is passed in, we'll just add the required factories, + // in this case lroPolicy(). It is important to note that passing an array of factories + // to a new client bypasses core-http default factories. Just the pipelines provided will be run. + options.requestPolicyFactories = [lroPolicy(), ...options.requestPolicyFactories]; + } else if (options.requestPolicyFactories) { + // When requestPolicyFactories is passed as a function, we'll create a new one that adds the factories provided + // in the options plus the required policies. When using this path, the pipelines passed to the client will be added to the + // default policies added by core-http + const optionsPolicies = options.requestPolicyFactories([lroPolicy()]) || [lroPolicy()]; + options.requestPolicyFactories = (defaultFactories) => [ + ...optionsPolicies, + ...defaultFactories + ]; + } else { + // In case no request policy factories were provided, we'll just need to create a function that will add + // the lroPolicy to the default pipelines added by core-http + options.requestPolicyFactories = (defaultFactories) => [lroPolicy(), ...defaultFactories]; + } + + super(credentials, options); + + this.requestContentType = "application/json; charset=utf-8"; + + this.baseUri = options.endpoint || "{endpoint}"; + + // Parameter assignments + this.endpoint = endpoint; + + // Assigning values to Constant parameters + this.apiVersion = options.apiVersion || "2019-06-01-preview"; + } +} diff --git a/sdk/synapse/synapse-artifacts/src/index.ts b/sdk/synapse/synapse-artifacts/src/index.ts new file mode 100644 index 000000000000..57003284ea6c --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/index.ts @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license.
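+// Reviewer note: a hedged sketch of how the factory wiring in ArtifactsClientContext behaves when +// callers pass an array (myPolicyFactory is a placeholder, not part of this diff): +//   const client = new ArtifactsClient(credential, endpoint, { +//     requestPolicyFactories: [myPolicyFactory] // effective pipeline: [lroPolicy(), myPolicyFactory]; core-http defaults are bypassed +//   });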
+/// +export * from "./models"; +export { + LROPoller, + LROPollerOptions, + LROOperationStep, + LROStrategy, + LROOperationResponse, + LROResponseInfo, + BaseResult, + LROOperationState, + FinalStateVia, + SendOperationFn +} from "./lro"; +export { ArtifactsClient } from "./artifactsClient"; +export { ArtifactsClientContext } from "./artifactsClientContext"; +export { + BigDataPools as BigDataPoolsOperation, + DataFlow as DataFlowOperation, + DataFlowDebugSession as DataFlowDebugSessionOperation, + Dataset as DatasetOperation, + WorkspaceGitRepoManagement as WorkspaceGitRepoManagementOperation, + Workspace as WorkspaceOperation, + TriggerRun as TriggerRunOperation, + Trigger as TriggerOperation, + SqlScript as SqlScriptOperation, + SqlPools as SqlPoolsOperation, + SparkJobDefinition as SparkJobDefinitionOperation, + PipelineRun as PipelineRunOperation, + Pipeline as PipelineOperation, + Notebook as NotebookOperation, + LinkedService as LinkedServiceOperation, + IntegrationRuntimes as IntegrationRuntimesOperation +} from "./operations"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts new file mode 100644 index 000000000000..b1619e36eea0 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/azureAsyncOperationStrategy.ts @@ -0,0 +1,209 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { + LROStrategy, + BaseResult, + LROOperationStep, + LROResponseInfo, + FinalStateVia, + LROSYM +} from "./models"; +import { OperationSpec, OperationArguments, OperationResponse } from "@azure/core-http"; +import { terminalStates } from "./constants"; +import { SendOperationFn } from "."; + +export function createAzureAsyncOperationStrategy<TResult extends BaseResult>( + initialOperation: LROOperationStep<TResult>, + sendOperationFn: SendOperationFn<TResult>, + finalStateVia?: FinalStateVia +): LROStrategy<TResult> { + const lroData = initialOperation.result._response[LROSYM]; + if (!lroData) { + throw new Error("Expected lroData to be defined for Azure-AsyncOperation strategy"); + } + + let currentOperation = initialOperation; + let lastKnownPollingUrl = lroData.azureAsyncOperation || lroData.operationLocation; + + return { + isTerminal: () => { + const currentResult = currentOperation.result._response[LROSYM]; + + if (!currentResult) { + throw new Error("Expected lroData to determine terminal status"); + } + + if (currentOperation === initialOperation) { + // Azure-AsyncOperations don't need to check for terminal state + // on originalOperation result, always need to poll + return false; + } + + const { status = "succeeded" } = currentResult; + return terminalStates.includes(status.toLowerCase()); + }, + sendFinalRequest: async () => { + if (!initialOperation.result._response[LROSYM]) { + throw new Error("Expected lroData to determine terminal status"); + } + + if (!currentOperation.result._response[LROSYM]) { + throw new Error("Expected lroData to determine terminal status"); + } + + const initialOperationResult = initialOperation.result._response[LROSYM]; + const currentOperationResult = currentOperation.result._response[LROSYM]; + + if (!shouldPerformFinalGet(initialOperationResult, currentOperationResult)) { + return currentOperation; + } + + if (initialOperationResult?.requestMethod === "PUT") { + currentOperation = await sendFinalGet(initialOperation, sendOperationFn); + + return currentOperation; + } + + if (initialOperationResult?.location) { + switch (finalStateVia) { + case "original-uri":
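+            // Reviewer note: "original-uri" re-issues the final GET against the initial request URL; +            // sendFinalGet below reuses initialOperation.spec when no explicit path is passed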
currentOperation = await sendFinalGet(initialOperation, sendOperationFn); + return currentOperation; + + case "azure-async-operation": + return currentOperation; + case "location": + default: + const location = initialOperationResult.location || currentOperationResult?.location; + + if (!location) { + throw new Error("Couldn't determine final GET URL from location"); + } + + return await sendFinalGet(initialOperation, sendOperationFn, location); + } + } + + // All other cases return the last operation + return currentOperation; + }, + poll: async () => { + if (!lastKnownPollingUrl) { + throw new Error("Unable to determine polling url"); + } + + const pollingArgs = currentOperation.args; + // Make sure we don't send any body to the get request + const { requestBody, responses, ...restSpec } = currentOperation.spec; + + const pollingSpec: OperationSpec = { + ...restSpec, + responses: getCompositeMappers(responses), + httpMethod: "GET", + path: lastKnownPollingUrl + }; + + const result = await sendOperationFn(pollingArgs, pollingSpec); + + // Update latest polling url + lastKnownPollingUrl = + result._response[LROSYM]?.azureAsyncOperation || + result._response[LROSYM]?.operationLocation || + lastKnownPollingUrl; + + // Update lastOperation result + currentOperation = { + args: pollingArgs, + spec: pollingSpec, + result + }; + + return currentOperation; + } + }; +} + +/** + * Polling calls will always return a status object, i.e. {"status": "success"}. + * These intermediate responses are not described in the swagger, so we need to + * pass custom mappers at runtime. + * This function replaces all the existing mappers to be able to deserialize a status object + * @param responses Original set of responses defined in the operation + */ +function getCompositeMappers(responses: { + [responseCode: string]: OperationResponse; +}): { + [responseCode: string]: OperationResponse; +} { + return Object.keys(responses).reduce((acc, statusCode) => { + return { + ...acc, + [statusCode]: { + ...responses[statusCode], + bodyMapper: { + type: { + name: "Composite", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + } + } + } + } + } + }; + }, {} as { [responseCode: string]: OperationResponse }); +} + +function shouldPerformFinalGet(initialResult?: LROResponseInfo, currentResult?: LROResponseInfo) { + const { status } = currentResult || {}; + const { requestMethod: initialRequestMethod, location } = initialResult || {}; + if (status && status.toLowerCase() !== "succeeded") { + return false; + } + + if (initialRequestMethod === "DELETE") { + return false; + } + + if (initialRequestMethod !== "PUT" && !location) { + return false; + } + + return true; +} + +async function sendFinalGet<TResult extends BaseResult>( + initialOperation: LROOperationStep<TResult>, + sendOperationFn: SendOperationFn<TResult>, + path?: string +): Promise<LROOperationStep<TResult>> { + // Make sure we don't send any body to the get request + const { requestBody, ...restSpec } = initialOperation.spec; + const finalGetSpec: OperationSpec = { + ...restSpec, + httpMethod: "GET" + }; + + // Send final GET request to the Original URL + const spec = { + ...finalGetSpec, + ...(path && { path }) + }; + + let operationArgs: OperationArguments = initialOperation.args; + if (operationArgs.options) { + operationArgs.options.shouldDeserialize = true; + } + + const finalResult = await sendOperationFn(initialOperation.args, spec); + + return { + args: initialOperation.args, + spec, + result: finalResult + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts
b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts new file mode 100644 index 000000000000..f5ab0f2b7010 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/bodyPollingStrategy.ts @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { LROStrategy, BaseResult, LROOperationStep, LROSYM } from "./models"; +import { OperationSpec } from "@azure/core-http"; +import { terminalStates } from "./constants"; +import { SendOperationFn } from "./lroPoller"; + +/** + * Creates a polling strategy based on BodyPolling which uses the provisioning state + * from the result to determine the current operation state + */ +export function createBodyPollingStrategy<TResult extends BaseResult>( + initialOperation: LROOperationStep<TResult>, + sendOperation: SendOperationFn<TResult> +): LROStrategy<TResult> { + if (!initialOperation.result._response[LROSYM]) { + throw new Error("Expected lroData to be defined for BodyPolling strategy"); + } + + let currentOperation = initialOperation; + + return { + isTerminal: () => { + const currentResult = currentOperation.result._response[LROSYM]; + if (!currentResult) { + throw new Error("Expected lroData to determine terminal status"); + } + + const { provisioningState = "succeeded" } = currentResult; + // If provisioning state is missing, default to Success + + return terminalStates.includes(provisioningState.toLowerCase()); + }, + sendFinalRequest: () => { + // BodyPolling doesn't require a final get so return the lastOperation + return Promise.resolve(currentOperation); + }, + poll: async () => { + // When doing BodyPolling, we need to poll to the original url with a + // GET http method + const { requestBody, ...restSpec } = initialOperation.spec; + const pollingSpec: OperationSpec = { + // Make sure we don't send any body to the get request + ...restSpec, + httpMethod: "GET" + }; + + // Execute the polling operation + initialOperation.result = await sendOperation(initialOperation.args, pollingSpec); + return initialOperation; + } + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/constants.ts b/sdk/synapse/synapse-artifacts/src/lro/constants.ts new file mode 100644 index 000000000000..c04d09e73f80 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/constants.ts @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const terminalStates = ["succeeded", "failed", "canceled", "cancelled"]; diff --git a/sdk/synapse/synapse-artifacts/src/lro/index.ts b/sdk/synapse/synapse-artifacts/src/lro/index.ts new file mode 100644 index 000000000000..ae5da2477850 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/index.ts @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license.
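+// Reviewer note: a tiny hedged sketch of the terminal check createBodyPollingStrategy performs +// against terminalStates above (standalone, illustration only): +//   import { terminalStates } from "./constants"; +//   const isTerminal = (provisioningState: string = "succeeded") => +//     terminalStates.includes(provisioningState.toLowerCase()); +//   isTerminal("Succeeded"); // true +//   isTerminal("Running"); // false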
+export { shouldDeserializeLRO } from "./requestUtils"; +export { createBodyPollingStrategy } from "./bodyPollingStrategy"; +export { terminalStates } from "./constants"; +export { lroPolicy } from "./lroPolicy"; +export { LROPoller, LROPollerOptions, SendOperationFn } from "./lroPoller"; +export { + LROResponseInfo, + BaseResult, + LROOperationStep, + LROOperationState, + LROStrategy, + LROOperation, + FinalStateVia, + LROOperationResponse +} from "./models"; +export { makeOperation } from "./operation"; +export * from "./locationStrategy"; diff --git a/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts new file mode 100644 index 000000000000..9ed079ccff59 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/locationStrategy.ts @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseResult, LROOperationStep, LROStrategy, LROSYM } from "./models"; +import { SendOperationFn } from "./lroPoller"; +import { OperationSpec } from "@azure/core-http"; + +export function createLocationStrategy<TResult extends BaseResult>( + initialOperation: LROOperationStep<TResult>, + sendOperationFn: SendOperationFn<TResult> +): LROStrategy<TResult> { + const lroData = initialOperation.result._response[LROSYM]; + if (!lroData) { + throw new Error("Expected lroData to be defined for Location strategy"); + } + + let currentOperation = initialOperation; + let lastKnownPollingUrl = lroData.location; + + return { + isTerminal: () => { + const currentResult = currentOperation.result._response[LROSYM]; + if (!currentResult) { + throw new Error("Expected lroData to determine terminal status"); + } + + if (currentOperation === initialOperation) { + return false; + } + + if (currentResult.statusCode === 202) { + return false; + } + + return true; + }, + sendFinalRequest: () => Promise.resolve(currentOperation), + poll: async () => { + if (!lastKnownPollingUrl) { + throw new Error("Unable to determine polling url"); + } + + const pollingArgs = currentOperation.args; + // Make sure we don't send any body to the get request + const { requestBody, ...restSpec } = currentOperation.spec; + const pollingSpec: OperationSpec = { + ...restSpec, + httpMethod: "GET", + path: lastKnownPollingUrl + }; + + const result = await sendOperationFn(pollingArgs, pollingSpec); + + // Update latest polling url + lastKnownPollingUrl = result._response[LROSYM]?.location || lastKnownPollingUrl; + + // Update lastOperation result + currentOperation = { + args: pollingArgs, + spec: pollingSpec, + result + }; + + return currentOperation; + } + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts new file mode 100644 index 000000000000..5e79eb789b2d --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPolicy.ts @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license.
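+// Reviewer note: the Location strategy above reduces to the following check after the initial +// response (hedged paraphrase, not code from this diff): a 202 keeps polling the last "location" +// header; any other status code is treated as terminal: +//   const isTerminalAfterFirstPoll = (statusCode: number) => statusCode !== 202;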
+import { + RequestPolicy, + RequestPolicyOptions, + BaseRequestPolicy, + HttpOperationResponse, + WebResource +} from "@azure/core-http"; +import { LROOperationResponse, LROSYM } from "./models"; +import { getLROData } from "./requestUtils"; + +export function lroPolicy() { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => { + return new LROPolicy(nextPolicy, options); + } + }; +} + +export class LROPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) { + super(nextPolicy, options); + } + + public async sendRequest(webResource: WebResource): Promise<HttpOperationResponse> { + let result: LROOperationResponse = await this._nextPolicy.sendRequest(webResource); + const _lroData = getLROData(result); + + result[LROSYM] = _lroData; + + return result; + } +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts new file mode 100644 index 000000000000..b66b626cd18c --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/lroPoller.ts @@ -0,0 +1,130 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Poller } from "@azure/core-lro"; +import { OperationSpec, OperationArguments, delay, RestError } from "@azure/core-http"; +import { BaseResult, LROOperationState, LROOperationStep, FinalStateVia, LROSYM } from "./models"; +import { makeOperation } from "./operation"; +import { createBodyPollingStrategy } from "./bodyPollingStrategy"; +import { createAzureAsyncOperationStrategy } from "./azureAsyncOperationStrategy"; +import { createLocationStrategy } from "./locationStrategy"; +import { createPassthroughStrategy } from "./passthroughStrategy"; + +export type SendOperationFn<TResult extends BaseResult> = ( + args: OperationArguments, + spec: OperationSpec +) => Promise<TResult>; + +export interface LROPollerOptions<TResult extends BaseResult> { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * Arguments used to send the initial operation + */ + initialOperationArguments: OperationArguments; + /** + * Operation spec provided for the initial operation + */ + initialOperationSpec: OperationSpec; + /** + * Result from the initial operation + */ + initialOperationResult: TResult; + /** + * Function to execute an operation based on an operation spec and arguments + */ + sendOperation: SendOperationFn<TResult>; + /** + * Optional information on where to poll. When not defined it defaults to "Location" + */ + finalStateVia?: FinalStateVia; +} + +export class LROPoller<TResult extends BaseResult> extends Poller< + LROOperationState<TResult>, + TResult +> { + private intervalInMs: number; + + constructor({ + initialOperationArguments, + initialOperationResult, + initialOperationSpec, + sendOperation, + finalStateVia, + intervalInMs = 2000 + }: LROPollerOptions<TResult>) { + const initialOperation = { + args: initialOperationArguments, + spec: initialOperationSpec, + result: initialOperationResult + }; + + const pollingStrategy = getPollingStrategy(initialOperation, sendOperation, finalStateVia); + + const state: LROOperationState<TResult> = { + // Initial operation will become the last operation + initialOperation, + lastOperation: initialOperation, + pollingStrategy, + finalStateVia + }; + + const operation = makeOperation(state); + super(operation); + + this.intervalInMs = intervalInMs; + } + + /** + * The method used by the poller to wait before attempting to update its operation.
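+   * (Called by core-lro between update() cycles; intervalInMs defaults to 2000 ms in the constructor above.)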
+ */ + delay(): Promise<void> { + return delay(this.intervalInMs); + } +} + +/** + * This function determines which strategy to use based on the response from + * the last operation executed; this last operation can be an initial operation + * or a polling operation. The 3 possible strategies are described below: + * + * A) Azure-AsyncOperation or Operation-Location + * B) Location + * C) BodyPolling (provisioningState) + * - This strategy is used when: + * - Response doesn't contain any of the following headers Location, Azure-AsyncOperation or Operation-Location + * - Last operation method is PUT or PATCH + */ +function getPollingStrategy<TResult extends BaseResult>( + initialOperation: LROOperationStep<TResult>, + sendOperationFn: SendOperationFn<TResult>, + finalStateVia?: FinalStateVia +) { + const lroData = initialOperation.result._response[LROSYM]; + + if (!lroData) { + const error = new RestError( + "Service response doesn't include the required LRO data to continue polling" + ); + error.statusCode = initialOperation.result._response.status; + error.response = initialOperation.result._response; + throw error; + } + + if (lroData.azureAsyncOperation || lroData.operationLocation) { + return createAzureAsyncOperationStrategy(initialOperation, sendOperationFn, finalStateVia); + } + + if (lroData.location) { + return createLocationStrategy(initialOperation, sendOperationFn); + } + + if (["PUT", "PATCH"].includes(lroData.requestMethod || "")) { + return createBodyPollingStrategy(initialOperation, sendOperationFn); + } + + // Default strategy is just a passthrough returning the initial operation + return createPassthroughStrategy(initialOperation); +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/models.ts b/sdk/synapse/synapse-artifacts/src/lro/models.ts new file mode 100644 index 000000000000..a7b811e1b81b --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/models.ts @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { + OperationArguments, + OperationSpec, + RestResponse, + HttpMethods, + HttpOperationResponse +} from "@azure/core-http"; +import { PollOperationState, PollOperation } from "@azure/core-lro"; +export const LROSYM = Symbol("LROData"); + +export type FinalStateVia = "azure-async-operation" | "location" | "original-uri"; + +export interface LROResponseInfo { + requestMethod: HttpMethods; + statusCode: number; + isInitialRequest?: boolean; + azureAsyncOperation?: string; + operationLocation?: string; + location?: string; + provisioningState?: string; + status?: string; +} + +/** + * Extended operation response for LROs + */ +export type LROOperationResponse = HttpOperationResponse & { + /** + * Symbol that contains LRO details + */ + [LROSYM]?: LROResponseInfo; +}; + +export interface BaseResult extends RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data.
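+   * (lroPolicy() tags this response with LROSYM via getLROData(), so strategies can read the parsed LRO headers.)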
+ */ + _response: LROOperationResponse; +} + +export interface LROOperationStep<TResult extends BaseResult> { + args: OperationArguments; + spec: OperationSpec; + result: TResult; +} + +export interface LROOperationState<TResult extends BaseResult> extends PollOperationState<TResult> { + lastOperation: LROOperationStep<TResult>; + initialOperation: LROOperationStep<TResult>; + pollingStrategy: LROStrategy<TResult>; + finalStateVia?: FinalStateVia; +} + +export interface LROStrategy<TResult extends BaseResult> { + isTerminal: () => boolean; + sendFinalRequest: () => Promise<LROOperationStep<TResult>>; + poll: () => Promise<LROOperationStep<TResult>>; +} + +export type LROOperation<TResult extends BaseResult> = PollOperation< + LROOperationState<TResult>, + TResult +>; diff --git a/sdk/synapse/synapse-artifacts/src/lro/operation.ts b/sdk/synapse/synapse-artifacts/src/lro/operation.ts new file mode 100644 index 000000000000..f35752e2da6b --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/operation.ts @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseResult, LROOperationState, LROOperation, LROSYM } from "./models"; + +/** + * Creates a copy of the operation from a given State + */ +export function makeOperation<TResult extends BaseResult>( + state: LROOperationState<TResult> +): LROOperation<TResult> { + return { + state: { ...state }, + update, + cancel, + toString: function(this: LROOperation<TResult>) { + return JSON.stringify(this.state); + } + }; +} + +/** + * General update function for LROPoller; the general process is as follows + * 1. Check initial operation result to determine the strategy to use + * - Strategies: Location, Azure-AsyncOperation, Original Uri + * 2. Check if the operation result has a terminal state + * - Terminal state will be determined by each strategy + * 2.1 If it is a terminal state, check if a final GET request is required; if so, + * send the final GET request and return the result from the operation. If no final GET + * is required, just return the result from the operation. + * - Determining what to call for the final request is the responsibility of each strategy + * 2.2 If it is not a terminal state, call the polling operation and go to step 1 + * - Determining what to call for polling is the responsibility of each strategy + * - Strategies will always use the latest URI for polling if provided, otherwise + * the last known one + */ +async function update<TResult extends BaseResult>( + this: LROOperation<TResult> +): Promise<LROOperation<TResult>> { + const state = { ...this.state }; + + const { sendFinalRequest, poll, isTerminal } = state.pollingStrategy; + const currentResponse = state.lastOperation; + const currentLroData = currentResponse.result._response[LROSYM]; + + if (!currentLroData) { + throw new Error("Expected lroData to be defined for updating LRO operation"); + } + + if (state.result) { + state.isCompleted = true; + return makeOperation(state); + } + + // Check if last result is terminal + if (isTerminal()) { + state.lastOperation = await sendFinalRequest(); + state.result = state.lastOperation.result; + } else { + state.lastOperation = await poll(); + } + + // Return operation + return makeOperation(state); +} + +/** + * Swagger doesn't support defining a cancel operation, so we'll just mark + * the operation state as cancelled + */ +async function cancel<TResult extends BaseResult>( + this: LROOperation<TResult> +): Promise<LROOperation<TResult>> { + return makeOperation({ ...this.state, isCancelled: true }); +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts b/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts new file mode 100644 index 000000000000..b58b641981f3 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/passthroughStrategy.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license.
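+// Reviewer note: a hedged sketch of driving the update() loop above through core-lro's +// Poller.pollUntilDone(); the option values shown are placeholders: +//   const poller = new LROPoller({ initialOperationArguments: args, initialOperationSpec: spec, +//     initialOperationResult: result, sendOperation }); +//   const finalResult = await poller.pollUntilDone(); // update() polls until isTerminal(), then sendFinalRequest() sets state.result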
+import { LROStrategy, BaseResult, LROOperationStep } from "./models"; + +/** + * Creates a passthrough polling strategy that treats the initial operation as + * already complete, so the poller never needs to issue additional requests + */ +export function createPassthroughStrategy<TResult extends BaseResult>( + initialOperation: LROOperationStep<TResult> +): LROStrategy<TResult> { + return { + isTerminal: () => { + return true; + }, + sendFinalRequest: () => { + // Passthrough doesn't require a final GET, so return the initial operation + return Promise.resolve(initialOperation); + }, + poll: async () => { + throw new Error("Passthrough strategy should never poll"); + } + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts new file mode 100644 index 000000000000..5e9e3cabdcef --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/lro/requestUtils.ts @@ -0,0 +1,101 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpOperationResponse } from "@azure/core-http"; +import { terminalStates } from "./constants"; +import { LROResponseInfo } from "./models"; + +/** + * We need to selectively deserialize our responses: only deserialize when we + * have reached a final LRO response, and skip deserializing any non-terminal + * polling responses + */ +export function shouldDeserializeLRO(finalStateVia?: string) { + let initialOperationInfo: LROResponseInfo | undefined; + let isInitialRequest = true; + + return (response: HttpOperationResponse) => { + if (response.status < 200 || response.status >= 300) { + return true; + } + + if (!initialOperationInfo) { + initialOperationInfo = getLROData(response); + } else { + isInitialRequest = false; + } + + if (initialOperationInfo.azureAsyncOperation || initialOperationInfo.operationLocation) { + return ( + !isInitialRequest && + isAsyncOperationFinalResponse(response, initialOperationInfo, finalStateVia) + ); + } + + if (initialOperationInfo.location) { + return isLocationFinalResponse(response); + } + + if (initialOperationInfo.requestMethod === "PUT") { + return isBodyPollingFinalResponse(response); + } + + return true; + }; +} + +function isAsyncOperationFinalResponse( + response: HttpOperationResponse, + initialOperationInfo: LROResponseInfo, + finalStateVia?: string +): boolean { + const status: string = response.parsedBody?.status || "Succeeded"; + if (!terminalStates.includes(status.toLowerCase())) { + return false; + } + + if (initialOperationInfo.requestMethod === "DELETE") { + return true; + } + + if ( + initialOperationInfo.requestMethod === "PUT" && + finalStateVia && + finalStateVia.toLowerCase() === "azure-asyncoperation" + ) { + return true; + } + + if (initialOperationInfo.requestMethod !== "PUT" && !initialOperationInfo.location) { + return true; + } + + return false; +} + +function isLocationFinalResponse(response: HttpOperationResponse): boolean { + return response.status !== 202; +} + +function isBodyPollingFinalResponse(response: HttpOperationResponse): boolean { + const provisioningState: string = + response.parsedBody?.properties?.provisioningState || "Succeeded"; + + if (terminalStates.includes(provisioningState.toLowerCase())) { + return true; + } + + return false; +} + +export function getLROData(result: HttpOperationResponse): LROResponseInfo { + const statusCode = result.status; + const { status, properties } = result.parsedBody || {}; + return { + statusCode, + azureAsyncOperation: result.headers.get("azure-asyncoperation"), + operationLocation: result.headers.get("operation-location"),
location: result.headers.get("location"), + requestMethod: result.request.method, + status, + provisioningState: properties?.provisioningState + }; +} diff --git a/sdk/synapse/synapse-artifacts/src/models/index.ts b/sdk/synapse/synapse-artifacts/src/models/index.ts new file mode 100644 index 000000000000..2f65ec8ff68f --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/models/index.ts @@ -0,0 +1,17574 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; +import { LROSYM, LROResponseInfo } from "../lro/models"; + +export type LinkedServiceUnion = + | LinkedService + | AzureStorageLinkedService + | AzureBlobStorageLinkedService + | AzureTableStorageLinkedService + | AzureSqlDWLinkedService + | SqlServerLinkedService + | AzureSqlDatabaseLinkedService + | AzureSqlMILinkedService + | AzureBatchLinkedService + | AzureKeyVaultLinkedService + | CosmosDbLinkedService + | DynamicsLinkedService + | DynamicsCrmLinkedService + | CommonDataServiceForAppsLinkedService + | HDInsightLinkedService + | FileServerLinkedService + | AzureFileStorageLinkedService + | GoogleCloudStorageLinkedService + | OracleLinkedService + | AzureMySqlLinkedService + | MySqlLinkedService + | PostgreSqlLinkedService + | SybaseLinkedService + | Db2LinkedService + | TeradataLinkedService + | AzureMLLinkedService + | AzureMLServiceLinkedService + | OdbcLinkedService + | InformixLinkedService + | MicrosoftAccessLinkedService + | HdfsLinkedService + | ODataLinkedService + | WebLinkedService + | CassandraLinkedService + | MongoDbLinkedService + | MongoDbV2LinkedService + | CosmosDbMongoDbApiLinkedService + | AzureDataLakeStoreLinkedService + | AzureBlobFSLinkedService + | Office365LinkedService + | SalesforceLinkedService + | SalesforceServiceCloudLinkedService + | SapCloudForCustomerLinkedService + | SapEccLinkedService + | SapOpenHubLinkedService + | RestServiceLinkedService + | AmazonS3LinkedService + | AmazonRedshiftLinkedService + | CustomDataSourceLinkedService + | AzureSearchLinkedService + | HttpLinkedService + | FtpServerLinkedService + | SftpServerLinkedService + | SapBWLinkedService + | SapHanaLinkedService + | AmazonMWSLinkedService + | AzurePostgreSqlLinkedService + | ConcurLinkedService + | CouchbaseLinkedService + | DrillLinkedService + | EloquaLinkedService + | GoogleBigQueryLinkedService + | GreenplumLinkedService + | HBaseLinkedService + | HiveLinkedService + | HubspotLinkedService + | ImpalaLinkedService + | JiraLinkedService + | MagentoLinkedService + | MariaDBLinkedService + | AzureMariaDBLinkedService + | MarketoLinkedService + | PaypalLinkedService + | PhoenixLinkedService + | PrestoLinkedService + | QuickBooksLinkedService + | ServiceNowLinkedService + | ShopifyLinkedService + | SparkLinkedService + | SquareLinkedService + | XeroLinkedService + | ZohoLinkedService + | VerticaLinkedService + | NetezzaLinkedService + | SalesforceMarketingCloudLinkedService + | HDInsightOnDemandLinkedService + | AzureDataLakeAnalyticsLinkedService + | AzureDatabricksLinkedService + | ResponsysLinkedService + | DynamicsAXLinkedService + | OracleServiceCloudLinkedService + | GoogleAdWordsLinkedService + | SapTableLinkedService + | AzureDataExplorerLinkedService + | AzureFunctionLinkedService; +export type DatasetUnion = + | Dataset + | AvroDataset + | ParquetDataset + | DelimitedTextDataset + | JsonDataset + | OrcDataset + | BinaryDataset + | AzureTableDataset + | AzureSqlTableDataset + | AzureSqlMITableDataset + | AzureSqlDWTableDataset + | 
CassandraTableDataset + | CustomDataset + | CosmosDbSqlApiCollectionDataset + | DocumentDbCollectionDataset + | DynamicsEntityDataset + | DynamicsCrmEntityDataset + | CommonDataServiceForAppsEntityDataset + | Office365Dataset + | MongoDbCollectionDataset + | MongoDbV2CollectionDataset + | CosmosDbMongoDbApiCollectionDataset + | ODataResourceDataset + | OracleTableDataset + | TeradataTableDataset + | AzureMySqlTableDataset + | AmazonRedshiftTableDataset + | Db2TableDataset + | RelationalTableDataset + | InformixTableDataset + | OdbcTableDataset + | MySqlTableDataset + | PostgreSqlTableDataset + | MicrosoftAccessTableDataset + | SalesforceObjectDataset + | SalesforceServiceCloudObjectDataset + | SybaseTableDataset + | SapBwCubeDataset + | SapCloudForCustomerResourceDataset + | SapEccResourceDataset + | SapHanaTableDataset + | SapOpenHubTableDataset + | SqlServerTableDataset + | RestResourceDataset + | SapTableResourceDataset + | WebTableDataset + | AzureSearchIndexDataset + | AmazonMWSObjectDataset + | AzurePostgreSqlTableDataset + | ConcurObjectDataset + | CouchbaseTableDataset + | DrillTableDataset + | EloquaObjectDataset + | GoogleBigQueryObjectDataset + | GreenplumTableDataset + | HBaseObjectDataset + | HiveObjectDataset + | HubspotObjectDataset + | ImpalaObjectDataset + | JiraObjectDataset + | MagentoObjectDataset + | MariaDBTableDataset + | AzureMariaDBTableDataset + | MarketoObjectDataset + | PaypalObjectDataset + | PhoenixObjectDataset + | PrestoObjectDataset + | QuickBooksObjectDataset + | ServiceNowObjectDataset + | ShopifyObjectDataset + | SparkObjectDataset + | SquareObjectDataset + | XeroObjectDataset + | ZohoObjectDataset + | NetezzaTableDataset + | VerticaTableDataset + | SalesforceMarketingCloudObjectDataset + | ResponsysObjectDataset + | DynamicsAXResourceDataset + | OracleServiceCloudObjectDataset + | AzureDataExplorerTableDataset + | GoogleAdWordsObjectDataset; +export type ActivityUnion = + | Activity + | ControlActivity + | ExecutionActivityUnion + | ExecutePipelineActivity + | IfConditionActivity + | SwitchActivity + | ForEachActivity + | WaitActivity + | UntilActivity + | ValidationActivity + | FilterActivity + | SetVariableActivity + | AppendVariableActivity + | WebHookActivity + | SqlPoolStoredProcedureActivity; +export type TriggerUnion = + | Trigger + | RerunTumblingWindowTrigger + | MultiplePipelineTriggerUnion + | TumblingWindowTrigger + | ChainingTrigger; +export type DataFlowUnion = DataFlow | MappingDataFlow; +export type IntegrationRuntimeUnion = + | IntegrationRuntime + | ManagedIntegrationRuntime + | SelfHostedIntegrationRuntime; +export type SecretBaseUnion = SecretBase | SecureString | AzureKeyVaultSecretReference; +export type DatasetLocationUnion = + | DatasetLocation + | AzureBlobStorageLocation + | AzureBlobFSLocation + | AzureDataLakeStoreLocation + | AmazonS3Location + | FileServerLocation + | AzureFileStorageLocation + | GoogleCloudStorageLocation + | FtpServerLocation + | SftpLocation + | HttpServerLocation + | HdfsLocation; +export type DatasetStorageFormatUnion = + | DatasetStorageFormat + | TextFormat + | JsonFormat + | AvroFormat + | OrcFormat + | ParquetFormat; +export type DatasetCompressionUnion = + | DatasetCompression + | DatasetBZip2Compression + | DatasetGZipCompression + | DatasetDeflateCompression + | DatasetZipDeflateCompression; +export type WebLinkedServiceTypePropertiesUnion = + | WebLinkedServiceTypeProperties + | WebAnonymousAuthentication + | WebBasicAuthentication + | WebClientCertificateAuthentication; +export type 
StoreReadSettingsUnion = + | StoreReadSettings + | AzureBlobStorageReadSettings + | AzureBlobFSReadSettings + | AzureDataLakeStoreReadSettings + | AmazonS3ReadSettings + | FileServerReadSettings + | AzureFileStorageReadSettings + | GoogleCloudStorageReadSettings + | FtpReadSettings + | SftpReadSettings + | HttpReadSettings + | HdfsReadSettings; +export type StoreWriteSettingsUnion = + | StoreWriteSettings + | SftpWriteSettings + | AzureBlobStorageWriteSettings + | AzureBlobFSWriteSettings + | AzureDataLakeStoreWriteSettings + | FileServerWriteSettings; +export type FormatReadSettingsUnion = FormatReadSettings | DelimitedTextReadSettings; +export type FormatWriteSettingsUnion = + | FormatWriteSettings + | AvroWriteSettings + | DelimitedTextWriteSettings + | JsonWriteSettings; +export type CopySourceUnion = + | CopySource + | AvroSource + | ParquetSource + | DelimitedTextSource + | JsonSource + | OrcSource + | BinarySource + | TabularSourceUnion + | BlobSource + | DocumentDbCollectionSource + | CosmosDbSqlApiSource + | DynamicsSource + | DynamicsCrmSource + | CommonDataServiceForAppsSource + | RelationalSource + | MicrosoftAccessSource + | ODataSource + | SalesforceServiceCloudSource + | RestSource + | FileSystemSource + | HdfsSource + | AzureDataExplorerSource + | OracleSource + | WebSource + | MongoDbSource + | MongoDbV2Source + | CosmosDbMongoDbApiSource + | Office365Source + | AzureDataLakeStoreSource + | AzureBlobFSSource + | HttpSource; +export type CopySinkUnion = + | CopySink + | DelimitedTextSink + | JsonSink + | OrcSink + | AzurePostgreSqlSink + | AzureMySqlSink + | SapCloudForCustomerSink + | AzureQueueSink + | AzureTableSink + | AvroSink + | ParquetSink + | BinarySink + | BlobSink + | FileSystemSink + | DocumentDbCollectionSink + | CosmosDbSqlApiSink + | SqlSink + | SqlServerSink + | AzureSqlSink + | SqlMISink + | SqlDWSink + | OracleSink + | AzureDataLakeStoreSink + | AzureBlobFSSink + | AzureSearchIndexSink + | OdbcSink + | InformixSink + | MicrosoftAccessSink + | DynamicsSink + | DynamicsCrmSink + | CommonDataServiceForAppsSink + | AzureDataExplorerSink + | SalesforceSink + | SalesforceServiceCloudSink + | CosmosDbMongoDbApiSink; +export type CopyTranslatorUnion = CopyTranslator | TabularTranslator; +export type DependencyReferenceUnion = + | DependencyReference + | TriggerDependencyReferenceUnion + | SelfDependencyTumblingWindowTriggerReference; +export type LinkedIntegrationRuntimeTypeUnion = + | LinkedIntegrationRuntimeType + | LinkedIntegrationRuntimeKeyAuthorization + | LinkedIntegrationRuntimeRbacAuthorization; +export type ExecutionActivityUnion = + | ExecutionActivity + | CopyActivity + | HDInsightHiveActivity + | HDInsightPigActivity + | HDInsightMapReduceActivity + | HDInsightStreamingActivity + | HDInsightSparkActivity + | ExecuteSsisPackageActivity + | CustomActivity + | SqlServerStoredProcedureActivity + | DeleteActivity + | AzureDataExplorerCommandActivity + | LookupActivity + | WebActivity + | GetMetadataActivity + | AzureMLBatchExecutionActivity + | AzureMLUpdateResourceActivity + | AzureMLExecutePipelineActivity + | DataLakeAnalyticsUsqlActivity + | DatabricksNotebookActivity + | DatabricksSparkJarActivity + | DatabricksSparkPythonActivity + | AzureFunctionActivity + | ExecuteDataFlowActivity + | SynapseNotebookActivity + | SynapseSparkJobDefinitionActivity; +export type MultiplePipelineTriggerUnion = + | MultiplePipelineTrigger + | ScheduleTrigger + | BlobTrigger + | BlobEventsTrigger; +export type TabularSourceUnion = + | TabularSource + | AzureTableSource + 
| InformixSource + | Db2Source + | OdbcSource + | MySqlSource + | PostgreSqlSource + | SybaseSource + | SapBwSource + | SalesforceSource + | SapCloudForCustomerSource + | SapEccSource + | SapHanaSource + | SapOpenHubSource + | SapTableSource + | SqlSource + | SqlServerSource + | AzureSqlSource + | SqlMISource + | SqlDWSource + | AzureMySqlSource + | TeradataSource + | CassandraSource + | AmazonMWSSource + | AzurePostgreSqlSource + | ConcurSource + | CouchbaseSource + | DrillSource + | EloquaSource + | GoogleBigQuerySource + | GreenplumSource + | HBaseSource + | HiveSource + | HubspotSource + | ImpalaSource + | JiraSource + | MagentoSource + | MariaDBSource + | AzureMariaDBSource + | MarketoSource + | PaypalSource + | PhoenixSource + | PrestoSource + | QuickBooksSource + | ServiceNowSource + | ShopifySource + | SparkSource + | SquareSource + | XeroSource + | ZohoSource + | NetezzaSource + | VerticaSource + | SalesforceMarketingCloudSource + | ResponsysSource + | DynamicsAXSource + | OracleServiceCloudSource + | GoogleAdWordsSource + | AmazonRedshiftSource; +export type TriggerDependencyReferenceUnion = + | TriggerDependencyReference + | TumblingWindowTriggerDependencyReference; + +/** + * A list of linked service resources. + */ +export interface LinkedServiceListResponse { + /** + * List of linked services. + */ + value: LinkedServiceResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * The Azure Synapse nested object which contains the information and credential which can be used to connect with related store or compute resource. + */ +export interface LinkedService { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "AzureStorage" + | "AzureBlobStorage" + | "AzureTableStorage" + | "AzureSqlDW" + | "SqlServer" + | "AzureSqlDatabase" + | "AzureSqlMI" + | "AzureBatch" + | "AzureKeyVault" + | "CosmosDb" + | "Dynamics" + | "DynamicsCrm" + | "CommonDataServiceForApps" + | "HDInsight" + | "FileServer" + | "AzureFileStorage" + | "GoogleCloudStorage" + | "Oracle" + | "AzureMySql" + | "MySql" + | "PostgreSql" + | "Sybase" + | "Db2" + | "Teradata" + | "AzureML" + | "AzureMLService" + | "Odbc" + | "Informix" + | "MicrosoftAccess" + | "Hdfs" + | "OData" + | "Web" + | "Cassandra" + | "MongoDb" + | "MongoDbV2" + | "CosmosDbMongoDbApi" + | "AzureDataLakeStore" + | "AzureBlobFS" + | "Office365" + | "Salesforce" + | "SalesforceServiceCloud" + | "SapCloudForCustomer" + | "SapEcc" + | "SapOpenHub" + | "RestService" + | "AmazonS3" + | "AmazonRedshift" + | "CustomDataSource" + | "AzureSearch" + | "HttpServer" + | "FtpServer" + | "Sftp" + | "SapBW" + | "SapHana" + | "AmazonMWS" + | "AzurePostgreSql" + | "Concur" + | "Couchbase" + | "Drill" + | "Eloqua" + | "GoogleBigQuery" + | "Greenplum" + | "HBase" + | "Hive" + | "Hubspot" + | "Impala" + | "Jira" + | "Magento" + | "MariaDB" + | "AzureMariaDB" + | "Marketo" + | "Paypal" + | "Phoenix" + | "Presto" + | "QuickBooks" + | "ServiceNow" + | "Shopify" + | "Spark" + | "Square" + | "Xero" + | "Zoho" + | "Vertica" + | "Netezza" + | "SalesforceMarketingCloud" + | "HDInsightOnDemand" + | "AzureDataLakeAnalytics" + | "AzureDatabricks" + | "Responsys" + | "DynamicsAX" + | "OracleServiceCloud" + | "GoogleAdWords" + | "SapTable" + | "AzureDataExplorer" + | "AzureFunction"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The integration runtime reference. 
+ */ + connectVia?: IntegrationRuntimeReference; + /** + * Linked service description. + */ + description?: string; + /** + * Parameters for linked service. + */ + parameters?: { [propertyName: string]: ParameterSpecification }; + /** + * List of tags that can be used for describing the linked service. + */ + annotations?: any[]; +} + +/** + * Integration runtime reference type. + */ +export interface IntegrationRuntimeReference { + /** + * Type of integration runtime. + */ + type: IntegrationRuntimeReferenceType; + /** + * Reference integration runtime name. + */ + referenceName: string; + /** + * Arguments for integration runtime. + */ + parameters?: { [propertyName: string]: any }; +} + +/** + * Definition of a single parameter for an entity. + */ +export interface ParameterSpecification { + /** + * Parameter type. + */ + type: ParameterType; + /** + * Default value of parameter. + */ + defaultValue?: any; +} + +/** + * Common fields that are returned in the response for all Azure Resource Manager resources + */ +export interface Resource { + /** + * Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; + /** + * The name of the resource + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly name?: string; + /** + * The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; +} + +/** + * The object that defines the structure of an Azure Synapse error response. + */ +export interface CloudError { + /** + * Error code. + */ + code: string; + /** + * Error message. + */ + message: string; + /** + * Property name/path in request associated with error. + */ + target?: string; + /** + * Array with additional error details. + */ + details?: CloudError[]; +} + +/** + * Request body structure for rename artifact. + */ +export interface ArtifactRenameRequest { + /** + * New name of the artifact. + */ + newName?: string; +} + +/** + * A list of dataset resources. + */ +export interface DatasetListResponse { + /** + * List of datasets. + */ + value: DatasetResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. 
+ */ +export interface Dataset { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "Avro" + | "Parquet" + | "DelimitedText" + | "Json" + | "Orc" + | "Binary" + | "AzureTable" + | "AzureSqlTable" + | "AzureSqlMITable" + | "AzureSqlDWTable" + | "CassandraTable" + | "CustomDataset" + | "CosmosDbSqlApiCollection" + | "DocumentDbCollection" + | "DynamicsEntity" + | "DynamicsCrmEntity" + | "CommonDataServiceForAppsEntity" + | "Office365Table" + | "MongoDbCollection" + | "MongoDbV2Collection" + | "CosmosDbMongoDbApiCollection" + | "ODataResource" + | "OracleTable" + | "TeradataTable" + | "AzureMySqlTable" + | "AmazonRedshiftTable" + | "Db2Table" + | "RelationalTable" + | "InformixTable" + | "OdbcTable" + | "MySqlTable" + | "PostgreSqlTable" + | "MicrosoftAccessTable" + | "SalesforceObject" + | "SalesforceServiceCloudObject" + | "SybaseTable" + | "SapBwCube" + | "SapCloudForCustomerResource" + | "SapEccResource" + | "SapHanaTable" + | "SapOpenHubTable" + | "SqlServerTable" + | "RestResource" + | "SapTableResource" + | "WebTable" + | "AzureSearchIndex" + | "AmazonMWSObject" + | "AzurePostgreSqlTable" + | "ConcurObject" + | "CouchbaseTable" + | "DrillTable" + | "EloquaObject" + | "GoogleBigQueryObject" + | "GreenplumTable" + | "HBaseObject" + | "HiveObject" + | "HubspotObject" + | "ImpalaObject" + | "JiraObject" + | "MagentoObject" + | "MariaDBTable" + | "AzureMariaDBTable" + | "MarketoObject" + | "PaypalObject" + | "PhoenixObject" + | "PrestoObject" + | "QuickBooksObject" + | "ServiceNowObject" + | "ShopifyObject" + | "SparkObject" + | "SquareObject" + | "XeroObject" + | "ZohoObject" + | "NetezzaTable" + | "VerticaTable" + | "SalesforceMarketingCloudObject" + | "ResponsysObject" + | "DynamicsAXResource" + | "OracleServiceCloudObject" + | "AzureDataExplorerTable" + | "GoogleAdWordsObject"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Dataset description. + */ + description?: string; + /** + * Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. + */ + structure?: any; + /** + * Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. + */ + schema?: any; + /** + * Linked service reference. + */ + linkedServiceName: LinkedServiceReference; + /** + * Parameters for dataset. + */ + parameters?: { [propertyName: string]: ParameterSpecification }; + /** + * List of tags that can be used for describing the Dataset. + */ + annotations?: any[]; + /** + * The folder that this Dataset is in. If not specified, Dataset will appear at the root level. + */ + folder?: DatasetFolder; +} + +/** + * Linked service reference type. + */ +export interface LinkedServiceReference { + /** + * Linked service reference type. + */ + type: Type; + /** + * Reference LinkedService name. + */ + referenceName: string; + /** + * Arguments for LinkedService. + */ + parameters?: { [propertyName: string]: any }; +} + +/** + * The folder that this Dataset is in. If not specified, Dataset will appear at the root level. + */ +export interface DatasetFolder { + /** + * The name of the folder that this Dataset is in. + */ + name?: string; +} + +/** + * A list of pipeline resources. + */ +export interface PipelineListResponse { + /** + * List of pipelines. 
+ */ + value: PipelineResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * A pipeline activity. + */ +export interface Activity { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "Container" + | "Execution" + | "Copy" + | "HDInsightHive" + | "HDInsightPig" + | "HDInsightMapReduce" + | "HDInsightStreaming" + | "HDInsightSpark" + | "ExecuteSSISPackage" + | "Custom" + | "SqlServerStoredProcedure" + | "ExecutePipeline" + | "Delete" + | "AzureDataExplorerCommand" + | "Lookup" + | "WebActivity" + | "GetMetadata" + | "IfCondition" + | "Switch" + | "ForEach" + | "AzureMLBatchExecution" + | "AzureMLUpdateResource" + | "AzureMLExecutePipeline" + | "DataLakeAnalyticsU-SQL" + | "Wait" + | "Until" + | "Validation" + | "Filter" + | "DatabricksNotebook" + | "DatabricksSparkJar" + | "DatabricksSparkPython" + | "SetVariable" + | "AppendVariable" + | "AzureFunctionActivity" + | "WebHook" + | "ExecuteDataFlow" + | "SynapseNotebook" + | "SparkJob" + | "SqlPoolStoredProcedure"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Activity name. + */ + name: string; + /** + * Activity description. + */ + description?: string; + /** + * Activity depends on condition. + */ + dependsOn?: ActivityDependency[]; + /** + * Activity user properties. + */ + userProperties?: UserProperty[]; +} + +/** + * Activity dependency information. + */ +export interface ActivityDependency { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Activity name. + */ + activity: string; + /** + * Match-Condition for the dependency. + */ + dependencyConditions: DependencyCondition[]; +} + +/** + * User property. + */ +export interface UserProperty { + /** + * User property name. + */ + name: string; + /** + * User property value. Type: string (or Expression with resultType string). + */ + value: any; +} + +/** + * Definition of a single variable for a Pipeline. + */ +export interface VariableSpecification { + /** + * Variable type. + */ + type: VariableType; + /** + * Default value of variable. + */ + defaultValue?: any; +} + +/** + * The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. + */ +export interface PipelineFolder { + /** + * The name of the folder that this Pipeline is in. + */ + name?: string; +} + +/** + * Response body with a run identifier. + */ +export interface CreateRunResponse { + /** + * Identifier of a run. + */ + runId: string; +} + +/** + * Query parameters for listing runs. + */ +export interface RunFilterParameters { + /** + * The continuation token for getting the next page of results. Null for first page. + */ + continuationToken?: string; + /** + * The time at or after which the run event was updated in 'ISO 8601' format. + */ + lastUpdatedAfter: Date; + /** + * The time at or before which the run event was updated in 'ISO 8601' format. + */ + lastUpdatedBefore: Date; + /** + * List of filters. + */ + filters?: RunQueryFilter[]; + /** + * List of OrderBy option. + */ + orderBy?: RunQueryOrderBy[]; +} + +/** + * Query filter option for listing runs. + */ +export interface RunQueryFilter { + /** + * Parameter name to be used for filter. 
The allowed operands to query pipeline runs are PipelineName, RunStart, RunEnd and Status; to query activity runs are ActivityName, ActivityRunStart, ActivityRunEnd, ActivityType and Status, and to query trigger runs are TriggerName, TriggerRunTimestamp and Status. + */ + operand: RunQueryFilterOperand; + /** + * Operator to be used for filter. + */ + operator: RunQueryFilterOperator; + /** + * List of filter values. + */ + values: string[]; +} + +/** + * An object to provide order by options for listing runs. + */ +export interface RunQueryOrderBy { + /** + * Parameter name to be used for order by. The allowed parameters to order by for pipeline runs are PipelineName, RunStart, RunEnd and Status; for activity runs are ActivityName, ActivityRunStart, ActivityRunEnd and Status; for trigger runs are TriggerName, TriggerRunTimestamp and Status. + */ + orderBy: RunQueryOrderByField; + /** + * Sorting order of the parameter. + */ + order: RunQueryOrder; +} + +/** + * A list of pipeline runs. + */ +export interface PipelineRunsQueryResponse { + /** + * List of pipeline runs. + */ + value: PipelineRun[]; + /** + * The continuation token for getting the next page of results, if any remaining results exist, null otherwise. + */ + continuationToken?: string; +} + +/** + * Information about a pipeline run. + */ +export interface PipelineRun { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Identifier of a run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly runId?: string; + /** + * Identifier that correlates all the recovery runs of a pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly runGroupId?: string; + /** + * Indicates if the recovered pipeline run is the latest in its group. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly isLatest?: boolean; + /** + * The pipeline name. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly pipelineName?: string; + /** + * The full or partial list of parameter name, value pair used in the pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly parameters?: { [propertyName: string]: string }; + /** + * Entity that started the pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly invokedBy?: PipelineRunInvokedBy; + /** + * The last updated timestamp for the pipeline run event in ISO8601 format. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly lastUpdated?: Date; + /** + * The start time of a pipeline run in ISO8601 format. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly runStart?: Date; + /** + * The end time of a pipeline run in ISO8601 format. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly runEnd?: Date; + /** + * The duration of a pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly durationInMs?: number; + /** + * The status of a pipeline run. + * NOTE: This property will not be serialized. It can only be populated by the server.
+
+/**
+ * Information about a pipeline run.
+ */
+export interface PipelineRun {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * Identifier of a run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly runId?: string;
+ /**
+ * Identifier that correlates all the recovery runs of a pipeline run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly runGroupId?: string;
+ /**
+ * Indicates if the recovered pipeline run is the latest in its group.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly isLatest?: boolean;
+ /**
+ * The pipeline name.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly pipelineName?: string;
+ /**
+ * The full or partial list of parameter name, value pair used in the pipeline run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly parameters?: { [propertyName: string]: string };
+ /**
+ * Entity that started the pipeline run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly invokedBy?: PipelineRunInvokedBy;
+ /**
+ * The last updated timestamp for the pipeline run event in ISO8601 format.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly lastUpdated?: Date;
+ /**
+ * The start time of a pipeline run in ISO8601 format.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly runStart?: Date;
+ /**
+ * The end time of a pipeline run in ISO8601 format.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly runEnd?: Date;
+ /**
+ * The duration of a pipeline run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly durationInMs?: number;
+ /**
+ * The status of a pipeline run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly status?: string;
+ /**
+ * The message from a pipeline run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly message?: string;
+}
+
+/**
+ * Provides entity name and id that started the pipeline run.
+ */
+export interface PipelineRunInvokedBy {
+ /**
+ * Name of the entity that started the pipeline run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly name?: string;
+ /**
+ * The ID of the entity that started the run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly id?: string;
+ /**
+ * The type of the entity that started the run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly invokedByType?: string;
+}
+
+/**
+ * A list of activity runs.
+ */
+export interface ActivityRunsQueryResponse {
+ /**
+ * List of activity runs.
+ */
+ value: ActivityRun[];
+ /**
+ * The continuation token for getting the next page of results, if any remaining results exist, null otherwise.
+ */
+ continuationToken?: string;
+}
+
+/**
+ * Information about an activity run in a pipeline.
+ */
+export interface ActivityRun {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The name of the pipeline.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly pipelineName?: string;
+ /**
+ * The id of the pipeline run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly pipelineRunId?: string;
+ /**
+ * The name of the activity.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly activityName?: string;
+ /**
+ * The type of the activity.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly activityType?: string;
+ /**
+ * The id of the activity run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly activityRunId?: string;
+ /**
+ * The name of the compute linked service.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly linkedServiceName?: string;
+ /**
+ * The status of the activity run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly status?: string;
+ /**
+ * The start time of the activity run in 'ISO 8601' format.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly activityRunStart?: Date;
+ /**
+ * The end time of the activity run in 'ISO 8601' format.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly activityRunEnd?: Date;
+ /**
+ * The duration of the activity run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly durationInMs?: number;
+ /**
+ * The input for the activity.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly input?: any;
+ /**
+ * The output for the activity.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly output?: any;
+ /**
+ * The error if any from the activity run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly error?: any;
+}
+
+/**
+ * A list of trigger resources.
+ */
+export interface TriggerListResponse {
+ /**
+ * List of triggers.
+ */
+ value: TriggerResource[];
+ /**
+ * The link to the next page of results, if any remaining results exist.
+ */
+ nextLink?: string;
+}
+
+/**
+ * Azure Synapse nested object which contains information about creating a pipeline run.
+ */
+export interface Trigger {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ type:
+ | "RerunTumblingWindowTrigger"
+ | "MultiplePipelineTrigger"
+ | "ScheduleTrigger"
+ | "BlobTrigger"
+ | "BlobEventsTrigger"
+ | "TumblingWindowTrigger"
+ | "ChainingTrigger";
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * Trigger description.
+ */
+ description?: string;
+ /**
+ * Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly runtimeState?: TriggerRuntimeState;
+ /**
+ * List of tags that can be used for describing the trigger.
+ */
+ annotations?: any[];
+}
+
+/**
+ * Defines the response of a trigger subscription operation.
+ */
+export interface TriggerSubscriptionOperationStatus {
+ /**
+ * Trigger name.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly triggerName?: string;
+ /**
+ * Event Subscription Status.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly status?: EventSubscriptionStatus;
+}
+
+/**
+ * A list of trigger runs.
+ */
+export interface TriggerRunsQueryResponse {
+ /**
+ * List of trigger runs.
+ */
+ value: TriggerRun[];
+ /**
+ * The continuation token for getting the next page of results, if any remaining results exist, null otherwise.
+ */
+ continuationToken?: string;
+}
+
+/**
+ * Trigger runs.
+ */
+export interface TriggerRun {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * Trigger run id.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly triggerRunId?: string;
+ /**
+ * Trigger name.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly triggerName?: string;
+ /**
+ * Trigger type.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly triggerType?: string;
+ /**
+ * Trigger run start time.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly triggerRunTimestamp?: Date;
+ /**
+ * Trigger run status.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly status?: TriggerRunStatus;
+ /**
+ * Trigger error message.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly message?: string;
+ /**
+ * List of property name and value related to trigger run. Name, value pair depends on type of trigger.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly properties?: { [propertyName: string]: string };
+ /**
+ * List of pipeline name and run Id triggered by the trigger run.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly triggeredPipelines?: { [propertyName: string]: string };
+}
+
+/**
+ * Azure Synapse nested object which contains a flow with data movements and transformations.
+ */
+export interface DataFlow {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ type: "MappingDataFlow";
+ /**
+ * The description of the data flow.
+ */
+ description?: string;
+ /**
+ * List of tags that can be used for describing the data flow.
+ */
+ annotations?: any[];
+ /**
+ * The folder that this data flow is in. If not specified, Data flow will appear at the root level.
+ */
+ folder?: DataFlowFolder;
+}
+
+/**
+ * The folder that this data flow is in. If not specified, Data flow will appear at the root level.
+ */
+export interface DataFlowFolder {
+ /**
+ * The name of the folder that this data flow is in.
+ */
+ name?: string;
+}
+
+/**
+ * A list of data flow resources.
+ */
+export interface DataFlowListResponse {
+ /**
+ * List of data flows.
+ */
+ value: DataFlowResource[];
+ /**
+ * The link to the next page of results, if any remaining results exist.
+ */
+ nextLink?: string;
+}
+
+/**
+ * Request body structure for creating data flow debug session.
+ */
+export interface CreateDataFlowDebugSessionRequest {
+ /**
+ * The name of the data flow.
+ */
+ dataFlowName?: string;
+ /**
+ * The ID of existing Databricks cluster.
+ */
+ existingClusterId?: string;
+ /**
+ * Timeout setting for Databricks cluster.
+ */
+ clusterTimeout?: number;
+ /**
+ * The name of new Databricks cluster.
+ */
+ newClusterName?: string;
+ /**
+ * The type of new Databricks cluster.
+ */
+ newClusterNodeType?: string;
+ /**
+ * Databricks linked service.
+ */
+ dataBricksLinkedService?: LinkedServiceResource;
+}
+
+/**
+ * Response body structure for creating data flow debug session.
+ */
+export interface CreateDataFlowDebugSessionResponse {
+ /**
+ * The ID of data flow debug session.
+ */
+ sessionId?: string;
+}
+
+/**
+ * A list of active debug sessions.
+ */
+export interface QueryDataFlowDebugSessionsResponse {
+ /**
+ * Array with all active debug sessions.
+ */
+ value?: DataFlowDebugSessionInfo[];
+ /**
+ * The link to the next page of results, if any remaining results exist.
+ */
+ nextLink?: string;
+}
+
+/**
+ * Data flow debug session info.
+ */
+export interface DataFlowDebugSessionInfo {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The name of the data flow.
+ */
+ dataFlowName?: string;
+ /**
+ * Compute type of the cluster.
+ */
+ computeType?: string;
+ /**
+ * Core count of the cluster.
+ */
+ coreCount?: number;
+ /**
+ * Node count of the cluster. (deprecated property)
+ */
+ nodeCount?: number;
+ /**
+ * Attached integration runtime name of data flow debug session.
+ */
+ integrationRuntimeName?: string;
+ /**
+ * The ID of data flow debug session.
+ */
+ sessionId?: string;
+ /**
+ * Start time of data flow debug session.
+ */
+ startTime?: string;
+ /**
+ * Time to live (in minutes) of the cluster.
+ */
+ timeToLiveInMinutes?: number;
+ /**
+ * Last activity time of data flow debug session.
+ */
+ lastActivityTime?: string;
+}
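+
+// ---------------------------------------------------------------------------
+// Editor's illustrative sketch (not part of the generated source): a minimal
+// CreateDataFlowDebugSessionRequest built from the shape above. All names and
+// sizing values here are hypothetical.
+//
+// const debugSessionRequest: CreateDataFlowDebugSessionRequest = {
+//   dataFlowName: "myDataFlow",
+//   clusterTimeout: 60,
+//   newClusterName: "debug-cluster",
+//   newClusterNodeType: "Standard_DS3_v2"
+// };
+// ---------------------------------------------------------------------------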
+
+/**
+ * Request body structure for starting data flow debug session.
+ */
+export interface DataFlowDebugPackage {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The ID of data flow debug session.
+ */
+ sessionId?: string;
+ /**
+ * Data flow instance.
+ */
+ dataFlow?: DataFlowDebugResource;
+ /**
+ * List of datasets.
+ */
+ datasets?: DatasetDebugResource[];
+ /**
+ * List of linked services.
+ */
+ linkedServices?: LinkedServiceDebugResource[];
+ /**
+ * Staging info for debug session.
+ */
+ staging?: DataFlowStagingInfo;
+ /**
+ * Data flow debug settings.
+ */
+ debugSettings?: DataFlowDebugPackageDebugSettings;
+}
+
+/**
+ * Azure Synapse nested debug resource.
+ */
+export interface SubResourceDebugResource {
+ /**
+ * The resource name.
+ */
+ name?: string;
+}
+
+/**
+ * Staging info for execute data flow activity.
+ */
+export interface DataFlowStagingInfo {
+ /**
+ * Staging linked service reference.
+ */
+ linkedService?: LinkedServiceReference;
+ /**
+ * Folder path for staging blob.
+ */
+ folderPath?: string;
+}
+
+/**
+ * Data flow debug settings.
+ */
+export interface DataFlowDebugPackageDebugSettings {
+ /**
+ * Source setting for data flow debug.
+ */
+ sourceSettings?: DataFlowSourceSetting[];
+ /**
+ * Data flow parameters.
+ */
+ parameters?: { [propertyName: string]: any };
+ /**
+ * Parameters for dataset.
+ */
+ datasetParameters?: any;
+}
+
+/**
+ * Definition of data flow source setting for debug.
+ */
+export interface DataFlowSourceSetting {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The data flow source name.
+ */
+ sourceName?: string;
+ /**
+ * Defines the row limit of data flow source in debug.
+ */
+ rowLimit?: number;
+}
+
+/**
+ * Response body structure for adding a data flow to the debug session.
+ */
+export interface AddDataFlowToDebugSessionResponse {
+ /**
+ * The ID of data flow debug job version.
+ */
+ jobVersion?: string;
+}
+
+/**
+ * Request body structure for deleting data flow debug session.
+ */
+export interface DeleteDataFlowDebugSessionRequest {
+ /**
+ * The ID of data flow debug session.
+ */
+ sessionId?: string;
+ /**
+ * The data flow which contains the debug session.
+ */
+ dataFlowName?: string;
+}
+
+/**
+ * Request body structure for data flow expression preview.
+ */
+export interface DataFlowDebugCommandRequest {
+ /**
+ * The ID of data flow debug session.
+ */
+ sessionId: string;
+ /**
+ * The data flow which contains the debug session.
+ */
+ dataFlowName?: string;
+ /**
+ * The command name.
+ */
+ commandName?: string;
+ /**
+ * The command payload object.
+ */
+ commandPayload: any;
+}
+
+/**
+ * Response body structure of data flow result for data preview, statistics or expression preview.
+ */
+export interface DataFlowDebugCommandResponse {
+ /**
+ * The run status of data preview, statistics or expression preview.
+ */
+ status?: string;
+ /**
+ * The result data of data preview, statistics or expression preview.
+ */
+ data?: string;
+}
+
+/**
+ * A list of SQL script resources.
+ */
+export interface SqlScriptsListResponse {
+ /**
+ * List of SQL scripts.
+ */
+ value: SqlScriptResource[];
+ /**
+ * The link to the next page of results, if any remaining results exist.
+ */
+ nextLink?: string;
+}
+
+/**
+ * SQL script resource type.
+ */
+export interface SqlScriptResource {
+ /**
+ * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly id?: string;
+ /**
+ * The name of the resource
+ */
+ name: string;
+ /**
+ * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly type?: string;
+ /**
+ * Resource Etag.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly etag?: string;
+ /**
+ * Properties of SQL script.
+ */
+ properties: SqlScript;
+}
+
+/**
+ * SQL script.
+ */
+export interface SqlScript {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The description of the SQL script.
+ */
+ description?: string;
+ /**
+ * The type of the SQL script.
+ */
+ type?: SqlScriptType;
+ /**
+ * The content of the SQL script.
+ */
+ content: SqlScriptContent;
+}
+
+/**
+ * The content of the SQL script.
+ */
+export interface SqlScriptContent {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * SQL query to execute.
+ */
+ query: string;
+ /**
+ * The connection used to execute the SQL script.
+ */
+ currentConnection: SqlConnection;
+ /**
+ * The metadata of the SQL script.
+ */
+ metadata?: SqlScriptMetadata;
+}
+
+/**
+ * The connection used to execute the SQL script.
+ */
+export interface SqlConnection {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The type of the connection.
+ */
+ type: SqlConnectionType;
+ /**
+ * The identifier of the connection.
+ */
+ name: string;
+}
+
+/**
+ * The metadata of the SQL script.
+ */
+export interface SqlScriptMetadata {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The language of the SQL script.
+ */
+ language?: string;
+}
+
+/**
+ * A list of Spark job definition resources.
+ */
+export interface SparkJobDefinitionsListResponse {
+ /**
+ * List of Spark job definitions.
+ */
+ value: SparkJobDefinitionResource[];
+ /**
+ * The link to the next page of results, if any remaining results exist.
+ */
+ nextLink?: string;
+}
+
+/**
+ * Spark job definition.
+ */
+export interface SparkJobDefinition {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The description of the Spark job definition.
+ */
+ description?: string;
+ /**
+ * Big data pool reference.
+ */
+ targetBigDataPool: BigDataPoolReference;
+ /**
+ * The required Spark version of the application.
+ */
+ requiredSparkVersion?: string;
+ /**
+ * The language of the Spark application.
+ */
+ language?: string;
+ /**
+ * The properties of the Spark job.
+ */
+ jobProperties: SparkJobProperties;
+}
+
+/**
+ * Big data pool reference.
+ */
+export interface BigDataPoolReference {
+ /**
+ * Big data pool reference type.
+ */
+ type: BigDataPoolReferenceType;
+ /**
+ * Reference big data pool name.
+ */
+ referenceName: string;
+}
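+
+// ---------------------------------------------------------------------------
+// Editor's illustrative sketch (not part of the generated source): a minimal
+// SparkJobDefinition. The pool and file names are hypothetical; the literal
+// "BigDataPoolReference" is assumed to be the member of
+// BigDataPoolReferenceType defined elsewhere in this file.
+//
+// const sparkJobDefinition: SparkJobDefinition = {
+//   targetBigDataPool: { type: "BigDataPoolReference", referenceName: "mySparkPool" },
+//   jobProperties: {
+//     name: "wordcount",
+//     file: "abfss://jobs@myaccount.dfs.core.windows.net/wordcount.jar",
+//     className: "com.example.WordCount",
+//     driverMemory: "4g",
+//     driverCores: 2,
+//     executorMemory: "4g",
+//     executorCores: 2,
+//     numExecutors: 2
+//   }
+// };
+// ---------------------------------------------------------------------------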
+
+/**
+ * The properties of the Spark job.
+ */
+export interface SparkJobProperties {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The name of the job.
+ */
+ name?: string;
+ /**
+ * File containing the application to execute.
+ */
+ file: string;
+ /**
+ * Main class for Java/Scala application.
+ */
+ className?: string;
+ /**
+ * Spark configuration properties.
+ */
+ conf?: any;
+ /**
+ * Command line arguments for the application.
+ */
+ args?: string[];
+ /**
+ * Jars to be used in this job.
+ */
+ jars?: string[];
+ /**
+ * Files to be used in this job.
+ */
+ files?: string[];
+ /**
+ * Archives to be used in this job.
+ */
+ archives?: string[];
+ /**
+ * Amount of memory to use for the driver process.
+ */
+ driverMemory: string;
+ /**
+ * Number of cores to use for the driver.
+ */
+ driverCores: number;
+ /**
+ * Amount of memory to use per executor process.
+ */
+ executorMemory: string;
+ /**
+ * Number of cores to use for each executor.
+ */
+ executorCores: number;
+ /**
+ * Number of executors to launch for this job.
+ */
+ numExecutors: number;
+}
+
+export interface SparkBatchJob {
+ livyInfo?: SparkBatchJobState;
+ /**
+ * The batch name.
+ */
+ name?: string;
+ /**
+ * The workspace name.
+ */
+ workspaceName?: string;
+ /**
+ * The Spark pool name.
+ */
+ sparkPoolName?: string;
+ /**
+ * The submitter name.
+ */
+ submitterName?: string;
+ /**
+ * The submitter identifier.
+ */
+ submitterId?: string;
+ /**
+ * The artifact identifier.
+ */
+ artifactId?: string;
+ /**
+ * The job type.
+ */
+ jobType?: SparkJobType;
+ /**
+ * The Spark batch job result.
+ */
+ result?: SparkBatchJobResultType;
+ /**
+ * The scheduler information.
+ */
+ scheduler?: SparkScheduler;
+ /**
+ * The plugin information.
+ */
+ plugin?: SparkServicePlugin;
+ /**
+ * The error information.
+ */
+ errors?: SparkServiceError[];
+ /**
+ * The tags.
+ */
+ tags?: { [propertyName: string]: string };
+ /**
+ * The session Id.
+ */
+ id: number;
+ /**
+ * The application id of this session
+ */
+ appId?: string | null;
+ /**
+ * The detailed application info.
+ */
+ appInfo?: { [propertyName: string]: string } | null;
+ /**
+ * The batch state
+ */
+ state?: string;
+ /**
+ * The log lines.
+ */
+ logLines?: string[] | null;
+}
+
+export interface SparkBatchJobState {
+ /**
+ * the time at which the "not_started" livy state was first seen.
+ */
+ notStartedAt?: Date | null;
+ /**
+ * the time at which the "starting" livy state was first seen.
+ */
+ startingAt?: Date | null;
+ /**
+ * the time at which the "running" livy state was first seen.
+ */
+ runningAt?: Date | null;
+ /**
+ * the time at which the "dead" livy state was first seen.
+ */
+ deadAt?: Date | null;
+ /**
+ * the time at which the "success" livy state was first seen.
+ */
+ successAt?: Date | null;
+ /**
+ * the time at which the "killed" livy state was first seen.
+ */
+ terminatedAt?: Date | null;
+ /**
+ * the time at which the "recovering" livy state was first seen.
+ */
+ recoveringAt?: Date | null;
+ /**
+ * The Spark job state.
+ */ + currentState?: string; + jobCreationRequest?: SparkRequest; +} + +export interface SparkRequest { + name?: string; + file?: string; + className?: string; + arguments?: string[]; + jars?: string[]; + pythonFiles?: string[]; + files?: string[]; + archives?: string[]; + /** + * Dictionary of + */ + configuration?: { [propertyName: string]: string }; + driverMemory?: string; + driverCores?: number; + executorMemory?: string; + executorCores?: number; + executorCount?: number; +} + +export interface SparkScheduler { + submittedAt?: Date | null; + scheduledAt?: Date | null; + endedAt?: Date | null; + cancellationRequestedAt?: Date; + currentState?: SchedulerCurrentState; +} + +export interface SparkServicePlugin { + preparationStartedAt?: Date | null; + resourceAcquisitionStartedAt?: Date | null; + submissionStartedAt?: Date | null; + monitoringStartedAt?: Date | null; + cleanupStartedAt?: Date | null; + currentState?: PluginCurrentState; +} + +export interface SparkServiceError { + message?: string; + errorCode?: string; + source?: SparkErrorSource; +} + +/** + * A list of Notebook resources. + */ +export interface NotebookListResponse { + /** + * List of Notebooks. + */ + value: NotebookResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * Notebook resource type. + */ +export interface NotebookResource { + /** + * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; + /** + * The name of the resource + */ + name: string; + /** + * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * Resource Etag. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly etag?: string; + /** + * Properties of Notebook. + */ + properties: Notebook; +} + +/** + * Notebook. + */ +export interface Notebook { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The description of the notebook. + */ + description?: string; + /** + * Big data pool reference. + */ + bigDataPool?: BigDataPoolReference | null; + /** + * Session properties. + */ + sessionProperties?: NotebookSessionProperties | null; + /** + * Notebook root-level metadata. + */ + metadata: NotebookMetadata; + /** + * Notebook format (major number). Incremented between backwards incompatible changes to the notebook format. + */ + nbformat: number; + /** + * Notebook format (minor number). Incremented for backward compatible changes to the notebook format. + */ + nbformatMinor: number; + /** + * Array of cells of the current notebook. + */ + cells: NotebookCell[]; +} + +/** + * Session properties. + */ +export interface NotebookSessionProperties { + /** + * Amount of memory to use for the driver process. + */ + driverMemory: string; + /** + * Number of cores to use for the driver. + */ + driverCores: number; + /** + * Amount of memory to use per executor process. + */ + executorMemory: string; + /** + * Number of cores to use for each executor. 
+ */
+ executorCores: number;
+ /**
+ * Number of executors to launch for this session.
+ */
+ numExecutors: number;
+}
+
+/**
+ * Notebook root-level metadata.
+ */
+export interface NotebookMetadata {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * Kernel information.
+ */
+ kernelspec?: NotebookKernelSpec;
+ /**
+ * Language info.
+ */
+ languageInfo?: NotebookLanguageInfo;
+}
+
+/**
+ * Kernel information.
+ */
+export interface NotebookKernelSpec {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * Name of the kernel specification.
+ */
+ name: string;
+ /**
+ * Name to display in UI.
+ */
+ displayName: string;
+}
+
+/**
+ * Language info.
+ */
+export interface NotebookLanguageInfo {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The programming language which this kernel runs.
+ */
+ name: string;
+ /**
+ * The codemirror mode to use for code in this language.
+ */
+ codemirrorMode?: string;
+}
+
+/**
+ * Notebook cell.
+ */
+export interface NotebookCell {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * String identifying the type of cell.
+ */
+ cellType: string;
+ /**
+ * Cell-level metadata.
+ */
+ metadata: any;
+ /**
+ * Contents of the cell, represented as an array of lines.
+ */
+ source: string[];
+ /**
+ * Attachments associated with the cell.
+ */
+ attachments?: any;
+ /**
+ * Cell-level output items.
+ */
+ outputs?: NotebookCellOutputItem[];
+}
+
+/**
+ * An item of the notebook cell execution output.
+ */
+export interface NotebookCellOutputItem {
+ /**
+ * For output_type=stream, determines the name of stream (stdout / stderr).
+ */
+ name?: string;
+ /**
+ * Execution sequence number.
+ */
+ executionCount?: number;
+ /**
+ * Execution, display, or stream outputs.
+ */
+ outputType: CellOutputType;
+ /**
+ * For output_type=stream, the stream's text output, represented as a string or an array of strings.
+ */
+ text?: any;
+ /**
+ * Output data. Use MIME type as key, and content as value.
+ */
+ data?: any;
+ /**
+ * Metadata for the output item.
+ */
+ metadata?: any;
+}
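+
+// ---------------------------------------------------------------------------
+// Editor's illustrative sketch (not part of the generated source): a minimal
+// Notebook with a single code cell, using the Notebook, NotebookMetadata and
+// NotebookCell shapes above. The kernel and language values are hypothetical.
+//
+// const notebook: Notebook = {
+//   metadata: {
+//     kernelspec: { name: "synapse_pyspark", displayName: "Synapse PySpark" },
+//     languageInfo: { name: "python" }
+//   },
+//   nbformat: 4,
+//   nbformatMinor: 2,
+//   cells: [
+//     { cellType: "code", metadata: {}, source: ["print('hello')"], outputs: [] }
+//   ]
+// };
+// ---------------------------------------------------------------------------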
+
+/**
+ * Details of the data lake storage account associated with the workspace
+ */
+export interface DataLakeStorageAccountDetails {
+ /**
+ * Account URL
+ */
+ accountUrl?: string;
+ /**
+ * Filesystem name
+ */
+ filesystem?: string;
+}
+
+/**
+ * Virtual Network Profile
+ */
+export interface VirtualNetworkProfile {
+ /**
+ * Subnet ID used for computes in workspace
+ */
+ computeSubnetId?: string;
+}
+
+/**
+ * Private endpoint details
+ */
+export interface PrivateEndpoint {
+ /**
+ * Resource id of the private endpoint.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly id?: string;
+}
+
+/**
+ * Connection state details of the private endpoint
+ */
+export interface PrivateLinkServiceConnectionState {
+ /**
+ * The private link service connection status.
+ */
+ status?: string;
+ /**
+ * The private link service connection description.
+ */
+ description?: string;
+ /**
+ * The actions required for private link service connection.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly actionsRequired?: string;
+}
+
+/**
+ * Details of the encryption associated with the workspace
+ */
+export interface EncryptionDetails {
+ /**
+ * Double Encryption enabled
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly doubleEncryptionEnabled?: boolean;
+ /**
+ * Customer Managed Key Details
+ */
+ cmk?: CustomerManagedKeyDetails;
+}
+
+/**
+ * Details of the customer managed key associated with the workspace
+ */
+export interface CustomerManagedKeyDetails {
+ /**
+ * The customer managed key status on the workspace
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly status?: string;
+ /**
+ * The key object of the workspace
+ */
+ key?: WorkspaceKeyDetails;
+}
+
+/**
+ * Details of the customer managed key associated with the workspace
+ */
+export interface WorkspaceKeyDetails {
+ /**
+ * Workspace Key sub-resource name
+ */
+ name?: string;
+ /**
+ * Workspace Key sub-resource key vault url
+ */
+ keyVaultUrl?: string;
+}
+
+/**
+ * Managed Virtual Network Settings
+ */
+export interface ManagedVirtualNetworkSettings {
+ /**
+ * Prevent Data Exfiltration
+ */
+ preventDataExfiltration?: boolean;
+ /**
+ * Linked Access Check On Target Resource
+ */
+ linkedAccessCheckOnTargetResource?: boolean;
+ /**
+ * Allowed Aad Tenant Ids For Linking
+ */
+ allowedAadTenantIdsForLinking?: string[];
+}
+
+/**
+ * Git integration settings
+ */
+export interface WorkspaceRepositoryConfiguration {
+ /**
+ * Type of workspace repository configuration. Example: WorkspaceVSTSConfiguration, WorkspaceGitHubConfiguration
+ */
+ type?: string;
+ /**
+ * GitHub Enterprise host name. For example: https://github.mydomain.com
+ */
+ hostName?: string;
+ /**
+ * Account name
+ */
+ accountName?: string;
+ /**
+ * VSTS project name
+ */
+ projectName?: string;
+ /**
+ * Repository name
+ */
+ repositoryName?: string;
+ /**
+ * Collaboration branch
+ */
+ collaborationBranch?: string;
+ /**
+ * Root folder to use in the repository
+ */
+ rootFolder?: string;
+}
+
+/**
+ * Purview Configuration
+ */
+export interface PurviewConfiguration {
+ /**
+ * Purview Resource ID
+ */
+ purviewResourceId?: string;
+}
+
+/**
+ * The workspace managed identity
+ */
+export interface ManagedIdentity {
+ /**
+ * The principal ID of the workspace managed identity
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly principalId?: string;
+ /**
+ * The tenant ID of the workspace managed identity
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly tenantId?: string;
+ /**
+ * The type of managed identity for the workspace
+ */
+ type?: ResourceIdentityType;
+}
+
+/**
+ * Contains details when the response code indicates an error.
+ */
+export interface ErrorContract {
+ /**
+ * The error details.
+ */
+ error?: ErrorResponse;
+}
+
+/**
+ * Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.)
+ */
+export interface ErrorResponse {
+ /**
+ * The error code.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly code?: string;
+ /**
+ * The error message.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly message?: string;
+ /**
+ * The error target.
+ * NOTE: This property will not be serialized.
It can only be populated by the server. + */ + readonly target?: string; + /** + * The error details. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly details?: ErrorResponse[]; + /** + * The error additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly additionalInfo?: ErrorAdditionalInfo[]; +} + +/** + * The resource management error additional info. + */ +export interface ErrorAdditionalInfo { + /** + * The additional info type. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * The additional info. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly info?: any; +} + +/** + * List of SQL pools + */ +export interface SqlPoolInfoListResult { + /** + * Link to the next page of results + */ + nextLink?: string; + /** + * List of SQL pools + */ + value?: SqlPool[]; +} + +/** + * SQL pool SKU + */ +export interface Sku { + /** + * The service tier + */ + tier?: string; + /** + * The SKU name + */ + name?: string; + /** + * If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted. + */ + capacity?: number; +} + +/** + * Collection of Big Data pool information + */ +export interface BigDataPoolResourceInfoListResult { + /** + * Link to the next page of results + */ + nextLink?: string; + /** + * List of Big Data pools + */ + value?: BigDataPoolResourceInfo[]; +} + +/** + * Auto-scaling properties of a Big Data pool powered by Apache Spark + */ +export interface AutoScaleProperties { + /** + * The minimum number of nodes the Big Data pool can support. + */ + minNodeCount?: number; + /** + * Whether automatic scaling is enabled for the Big Data pool. + */ + enabled?: boolean; + /** + * The maximum number of nodes the Big Data pool can support. + */ + maxNodeCount?: number; +} + +/** + * Auto-pausing properties of a Big Data pool powered by Apache Spark + */ +export interface AutoPauseProperties { + /** + * Number of minutes of idle time before the Big Data pool is automatically paused. + */ + delayInMinutes?: number; + /** + * Whether auto-pausing is enabled for the Big Data pool. + */ + enabled?: boolean; +} + +/** + * Library requirements for a Big Data pool powered by Apache Spark + */ +export interface LibraryRequirements { + /** + * The last update time of the library requirements file. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly time?: Date; + /** + * The library requirements. + */ + content?: string; + /** + * The filename of the library requirements file. + */ + filename?: string; +} + +/** + * A list of integration runtime resources. + */ +export interface IntegrationRuntimeListResponse { + /** + * List of integration runtimes. + */ + value: IntegrationRuntimeResource[]; + /** + * The link to the next page of results, if any remaining results exist. + */ + nextLink?: string; +} + +/** + * Azure Synapse nested object which serves as a compute resource for activities. + */ +export interface IntegrationRuntime { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Managed" | "SelfHosted"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. 
+ */
+ [property: string]: any;
+ /**
+ * Integration runtime description.
+ */
+ description?: string;
+}
+
+export interface GitHubAccessTokenRequest {
+ /**
+ * The GitHub Client Id.
+ */
+ gitHubClientId: string;
+ /**
+ * The GitHub Access code.
+ */
+ gitHubAccessCode: string;
+ /**
+ * The GitHub access token base URL.
+ */
+ gitHubAccessTokenBaseUrl: string;
+}
+
+export interface GitHubAccessTokenResponse {
+ gitHubAccessToken?: string;
+}
+
+/**
+ * Azure Synapse expression definition.
+ */
+export interface Expression {
+ /**
+ * Expression type.
+ */
+ type: ExpressionType;
+ /**
+ * Expression value.
+ */
+ value: string;
+}
+
+/**
+ * The base definition of a secret type.
+ */
+export interface SecretBase {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ type: "SecureString" | "AzureKeyVaultSecret";
+}
+
+/**
+ * Request body structure for starting data flow debug session.
+ */
+export interface StartDataFlowDebugSessionRequest {
+ /**
+ * The ID of data flow debug session.
+ */
+ sessionId?: string;
+ /**
+ * Data flow instance.
+ */
+ dataFlow?: DataFlowResource;
+ /**
+ * List of datasets.
+ */
+ datasets?: DatasetResource[];
+ /**
+ * List of linked services.
+ */
+ linkedServices?: LinkedServiceResource[];
+ /**
+ * Staging info for debug session.
+ */
+ staging?: any;
+ /**
+ * Data flow debug settings.
+ */
+ debugSettings?: any;
+ /**
+ * Indicates if incremental debug is enabled.
+ */
+ incrementalDebug?: boolean;
+}
+
+/**
+ * Response body structure for starting data flow debug session.
+ */
+export interface StartDataFlowDebugSessionResponse {
+ /**
+ * The ID of data flow debug job version.
+ */
+ jobVersion?: string;
+}
+
+/**
+ * Request body structure for data flow preview data.
+ */
+export interface DataFlowDebugPreviewDataRequest {
+ /**
+ * The ID of data flow debug session.
+ */
+ sessionId?: string;
+ /**
+ * The data flow which contains the debug session.
+ */
+ dataFlowName?: string;
+ /**
+ * The output stream name.
+ */
+ streamName?: string;
+ /**
+ * The row limit for preview request.
+ */
+ rowLimits?: number;
+}
+
+/**
+ * Request body structure for data flow statistics.
+ */
+export interface DataFlowDebugStatisticsRequest {
+ /**
+ * The ID of data flow debug session.
+ */
+ sessionId?: string;
+ /**
+ * The data flow which contains the debug session.
+ */
+ dataFlowName?: string;
+ /**
+ * The output stream name.
+ */
+ streamName?: string;
+ /**
+ * List of column names.
+ */
+ columns?: string[];
+}
+
+/**
+ * Request body structure for data flow expression preview.
+ */
+export interface EvaluateDataFlowExpressionRequest {
+ /**
+ * The ID of data flow debug session.
+ */
+ sessionId?: string;
+ /**
+ * The data flow which contains the debug session.
+ */
+ dataFlowName?: string;
+ /**
+ * The output stream name.
+ */
+ streamName?: string;
+ /**
+ * The row limit for preview request.
+ */
+ rowLimits?: number;
+ /**
+ * The expression for preview.
+ */
+ expression?: string;
+}
+
+/**
+ * Response body structure of data flow query for data preview, statistics or expression preview.
+ */
+export interface DataFlowDebugQueryResponse {
+ /**
+ * The run ID of data flow debug session.
+ */
+ runId?: string;
+}
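+
+// ---------------------------------------------------------------------------
+// Editor's illustrative sketch (not part of the generated source): an
+// EvaluateDataFlowExpressionRequest for previewing an expression against an
+// active debug session. The session ID, flow name, stream name and expression
+// are all hypothetical values.
+//
+// const expressionRequest: EvaluateDataFlowExpressionRequest = {
+//   sessionId: "229c688c-944c-44ac-b31a-82d50f347154",
+//   dataFlowName: "myDataFlow",
+//   streamName: "source1",
+//   rowLimits: 100,
+//   expression: "toString(columnA)"
+// };
+// ---------------------------------------------------------------------------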
+
+/**
+ * Response body structure of data flow result for data preview, statistics or expression preview.
+ */
+export interface DataFlowDebugResultResponse {
+ /**
+ * The run status of data preview, statistics or expression preview.
+ */
+ status?: string;
+ /**
+ * The result data of data preview, statistics or expression preview.
+ */
+ data?: string;
+}
+
+/**
+ * Defines the response of a provision trigger dependency operation.
+ */
+export interface TriggerDependencyProvisioningStatus {
+ /**
+ * Trigger name.
+ */
+ triggerName: string;
+ /**
+ * Provisioning status.
+ */
+ provisioningStatus: string;
+}
+
+/**
+ * Pipeline reference type.
+ */
+export interface PipelineReference {
+ /**
+ * Pipeline reference type.
+ */
+ type: PipelineReferenceType;
+ /**
+ * Reference pipeline name.
+ */
+ referenceName: string;
+ /**
+ * Reference name.
+ */
+ name?: string;
+}
+
+/**
+ * Pipeline that needs to be triggered with the given parameters.
+ */
+export interface TriggerPipelineReference {
+ /**
+ * Pipeline reference.
+ */
+ pipelineReference?: PipelineReference;
+ /**
+ * Pipeline parameters.
+ */
+ parameters?: { [propertyName: string]: any };
+}
+
+/**
+ * Parameters for updating a workspace resource.
+ */
+export interface WorkspaceUpdateParameters {
+ /**
+ * The resource tags.
+ */
+ tags?: { [propertyName: string]: string };
+ /**
+ * Managed service identity of the workspace.
+ */
+ identity?: WorkspaceIdentity;
+}
+
+/**
+ * Identity properties of the workspace resource.
+ */
+export interface WorkspaceIdentity {
+ /**
+ * The identity type. Currently the only supported type is 'SystemAssigned'.
+ */
+ type: "SystemAssigned";
+ /**
+ * The principal id of the identity.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly principalId?: string;
+ /**
+ * The client tenant id of the identity.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly tenantId?: string;
+}
+
+/**
+ * Dataset reference type.
+ */
+export interface DatasetReference {
+ /**
+ * Dataset reference type.
+ */
+ type: DatasetReferenceType;
+ /**
+ * Reference dataset name.
+ */
+ referenceName: string;
+ /**
+ * Arguments for dataset.
+ */
+ parameters?: { [propertyName: string]: any };
+}
+
+/**
+ * Data flow reference type.
+ */
+export interface DataFlowReference {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * Data flow reference type.
+ */
+ type: DataFlowReferenceType;
+ /**
+ * Reference data flow name.
+ */
+ referenceName: string;
+ /**
+ * Reference data flow parameters from dataset.
+ */
+ datasetParameters?: any;
+}
+
+/**
+ * Rerun tumbling window trigger Parameters.
+ */
+export interface RerunTumblingWindowTriggerActionParameters {
+ /**
+ * The start time for the time period for which restatement is initiated. Only UTC time is currently supported.
+ */
+ startTime: Date;
+ /**
+ * The end time for the time period for which restatement is initiated. Only UTC time is currently supported.
+ */
+ endTime: Date;
+ /**
+ * The max number of parallel time windows (ready for execution) for which a rerun is triggered.
+ */
+ maxConcurrency: number;
+}
+
+/**
+ * A list of rerun triggers.
+ */
+export interface RerunTriggerListResponse {
+ /**
+ * List of rerun triggers.
+ */
+ value: RerunTriggerResource[];
+ /**
+ * The continuation token for getting the next page of results, if any remaining results exist, null otherwise.
+ * NOTE: This property will not be serialized. It can only be populated by the server.
+ */
+ readonly nextLink?: string;
+}
+
+/**
+ * The request payload of get SSIS object metadata.
+ */ +export interface GetSsisObjectMetadataRequest { + /** + * Metadata path. + */ + metadataPath?: string; +} + +/** + * The status of the operation. + */ +export interface SsisObjectMetadataStatusResponse { + /** + * The status of the operation. + */ + status?: string; + /** + * The operation name. + */ + name?: string; + /** + * The operation properties. + */ + properties?: string; + /** + * The operation error message. + */ + error?: string; +} + +/** + * The exposure control request. + */ +export interface ExposureControlRequest { + /** + * The feature name. + */ + featureName?: string; + /** + * The feature type. + */ + featureType?: string; +} + +/** + * The exposure control response. + */ +export interface ExposureControlResponse { + /** + * The feature name. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly featureName?: string; + /** + * The feature value. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly value?: string; +} + +/** + * Synapse notebook reference type. + */ +export interface SynapseNotebookReference { + /** + * Synapse notebook reference type. + */ + type: NotebookReferenceType; + /** + * Reference notebook name. + */ + referenceName: string; +} + +/** + * Synapse spark job reference type. + */ +export interface SynapseSparkJobReference { + /** + * Synapse spark job reference type. + */ + type: SparkJobReferenceType; + /** + * Reference spark job name. + */ + referenceName: string; +} + +/** + * SQL pool reference type. + */ +export interface SqlPoolReference { + /** + * SQL pool reference type. + */ + type: SqlPoolReferenceType; + /** + * Reference SQL pool name. + */ + referenceName: string; +} + +/** + * A data flow transformation. + */ +export interface Transformation { + /** + * Transformation name. + */ + name: string; + /** + * Transformation description. + */ + description?: string; +} + +/** + * Dataset location. + */ +export interface DatasetLocation { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "AzureBlobStorageLocation" + | "AzureBlobFSLocation" + | "AzureDataLakeStoreLocation" + | "AmazonS3Location" + | "FileServerLocation" + | "AzureFileStorageLocation" + | "GoogleCloudStorageLocation" + | "FtpServerLocation" + | "SftpLocation" + | "HttpServerLocation" + | "HdfsLocation"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Specify the folder path of dataset. Type: string (or Expression with resultType string) + */ + folderPath?: any; + /** + * Specify the file name of dataset. Type: string (or Expression with resultType string). + */ + fileName?: any; +} + +/** + * Columns that define the structure of the dataset. + */ +export interface DatasetDataElement { + /** + * Name of the column. Type: string (or Expression with resultType string). + */ + name?: any; + /** + * Type of the column. Type: string (or Expression with resultType string). + */ + type?: any; +} + +/** + * Columns that define the physical type schema of the dataset. + */ +export interface DatasetSchemaDataElement { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Name of the schema column. Type: string (or Expression with resultType string). + */ + name?: any; + /** + * Type of the schema column. 
Type: string (or Expression with resultType string).
+ */
+ type?: any;
+}
+
+/**
+ * The format definition of a storage.
+ */
+export interface DatasetStorageFormat {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ type: "TextFormat" | "JsonFormat" | "AvroFormat" | "OrcFormat" | "ParquetFormat";
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * Serializer. Type: string (or Expression with resultType string).
+ */
+ serializer?: any;
+ /**
+ * Deserializer. Type: string (or Expression with resultType string).
+ */
+ deserializer?: any;
+}
+
+/**
+ * The compression method used on a dataset.
+ */
+export interface DatasetCompression {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ type: "BZip2" | "GZip" | "Deflate" | "ZipDeflate";
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+}
+
+/**
+ * Base definition of WebLinkedServiceTypeProperties, this typeProperties is polymorphic based on authenticationType, so not flattened in SDK models.
+ */
+export interface WebLinkedServiceTypeProperties {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ authenticationType: "Anonymous" | "Basic" | "ClientCertificate";
+ /**
+ * The URL of the web service endpoint, e.g. http://www.microsoft.com . Type: string (or Expression with resultType string).
+ */
+ url: any;
+}
+
+/**
+ * Custom script action to run on HDI on-demand cluster once it's up.
+ */
+export interface ScriptAction {
+ /**
+ * The user provided name of the script action.
+ */
+ name: string;
+ /**
+ * The URI for the script action.
+ */
+ uri: string;
+ /**
+ * The node types on which the script action should be executed.
+ */
+ roles: HdiNodeTypes;
+ /**
+ * The parameters for the script action.
+ */
+ parameters?: string;
+}
+
+/**
+ * Execution policy for an activity.
+ */
+export interface ActivityPolicy {
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * Specifies the timeout for the activity to run. The default timeout is 7 days. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ timeout?: any;
+ /**
+ * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0.
+ */
+ retry?: any;
+ /**
+ * Interval between each retry attempt (in seconds). The default is 30 sec.
+ */
+ retryIntervalInSeconds?: number;
+ /**
+ * When set to true, Input from activity is considered as secure and will not be logged to monitoring.
+ */
+ secureInput?: boolean;
+ /**
+ * When set to true, Output from activity is considered as secure and will not be logged to monitoring.
+ */
+ secureOutput?: boolean;
+}
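+
+// ---------------------------------------------------------------------------
+// Editor's illustrative sketch (not part of the generated source): an
+// ActivityPolicy with a one-hour timeout and three retries, 60 seconds apart.
+// The timeout string follows the d.hh:mm:ss pattern documented above; the
+// specific values are hypothetical.
+//
+// const policy: ActivityPolicy = {
+//   timeout: "0.01:00:00",
+//   retry: 3,
+//   retryIntervalInSeconds: 60,
+//   secureInput: false,
+//   secureOutput: false
+// };
+// ---------------------------------------------------------------------------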
+
+/**
+ * Connector read settings.
+ */
+export interface StoreReadSettings {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ type:
+ | "AzureBlobStorageReadSettings"
+ | "AzureBlobFSReadSettings"
+ | "AzureDataLakeStoreReadSettings"
+ | "AmazonS3ReadSettings"
+ | "FileServerReadSettings"
+ | "AzureFileStorageReadSettings"
+ | "GoogleCloudStorageReadSettings"
+ | "FtpReadSettings"
+ | "SftpReadSettings"
+ | "HttpReadSettings"
+ | "HdfsReadSettings";
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer).
+ */
+ maxConcurrentConnections?: any;
+}
+
+/**
+ * Connector write settings.
+ */
+export interface StoreWriteSettings {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ type:
+ | "SftpWriteSettings"
+ | "AzureBlobStorageWriteSettings"
+ | "AzureBlobFSWriteSettings"
+ | "AzureDataLakeStoreWriteSettings"
+ | "FileServerWriteSettings";
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+ /**
+ * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer).
+ */
+ maxConcurrentConnections?: any;
+ /**
+ * The type of copy behavior for copy sink.
+ */
+ copyBehavior?: any;
+}
+
+/**
+ * Distcp settings.
+ */
+export interface DistcpSettings {
+ /**
+ * Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType string).
+ */
+ resourceManagerEndpoint: any;
+ /**
+ * Specifies an existing folder path which will be used to store temp Distcp command script. The script file is generated by ADF and will be removed after the copy job finishes. Type: string (or Expression with resultType string).
+ */
+ tempScriptPath: any;
+ /**
+ * Specifies the Distcp options. Type: string (or Expression with resultType string).
+ */
+ distcpOptions?: any;
+}
+
+/**
+ * Format read settings.
+ */
+export interface FormatReadSettings {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ type: "DelimitedTextReadSettings";
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+}
+
+/**
+ * Format write settings.
+ */
+export interface FormatWriteSettings {
+ /**
+ * Polymorphic discriminator, which specifies the different types this object can be
+ */
+ type: "AvroWriteSettings" | "DelimitedTextWriteSettings" | "JsonWriteSettings";
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+}
+
+/**
+ * A copy activity source.
+ */ +export interface CopySource { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "AvroSource" + | "ParquetSource" + | "DelimitedTextSource" + | "JsonSource" + | "OrcSource" + | "BinarySource" + | "TabularSource" + | "AzureTableSource" + | "BlobSource" + | "DocumentDbCollectionSource" + | "CosmosDbSqlApiSource" + | "DynamicsSource" + | "DynamicsCrmSource" + | "CommonDataServiceForAppsSource" + | "RelationalSource" + | "InformixSource" + | "MicrosoftAccessSource" + | "Db2Source" + | "OdbcSource" + | "MySqlSource" + | "PostgreSqlSource" + | "SybaseSource" + | "SapBwSource" + | "ODataSource" + | "SalesforceSource" + | "SalesforceServiceCloudSource" + | "SapCloudForCustomerSource" + | "SapEccSource" + | "SapHanaSource" + | "SapOpenHubSource" + | "SapTableSource" + | "RestSource" + | "SqlSource" + | "SqlServerSource" + | "AzureSqlSource" + | "SqlMISource" + | "SqlDWSource" + | "FileSystemSource" + | "HdfsSource" + | "AzureMySqlSource" + | "AzureDataExplorerSource" + | "OracleSource" + | "TeradataSource" + | "WebSource" + | "CassandraSource" + | "MongoDbSource" + | "MongoDbV2Source" + | "CosmosDbMongoDbApiSource" + | "Office365Source" + | "AzureDataLakeStoreSource" + | "AzureBlobFSSource" + | "HttpSource" + | "AmazonMWSSource" + | "AzurePostgreSqlSource" + | "ConcurSource" + | "CouchbaseSource" + | "DrillSource" + | "EloquaSource" + | "GoogleBigQuerySource" + | "GreenplumSource" + | "HBaseSource" + | "HiveSource" + | "HubspotSource" + | "ImpalaSource" + | "JiraSource" + | "MagentoSource" + | "MariaDBSource" + | "AzureMariaDBSource" + | "MarketoSource" + | "PaypalSource" + | "PhoenixSource" + | "PrestoSource" + | "QuickBooksSource" + | "ServiceNowSource" + | "ShopifySource" + | "SparkSource" + | "SquareSource" + | "XeroSource" + | "ZohoSource" + | "NetezzaSource" + | "VerticaSource" + | "SalesforceMarketingCloudSource" + | "ResponsysSource" + | "DynamicsAXSource" + | "OracleServiceCloudSource" + | "GoogleAdWordsSource" + | "AmazonRedshiftSource"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Source retry count. Type: integer (or Expression with resultType integer). + */ + sourceRetryCount?: any; + /** + * Source retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + sourceRetryWait?: any; + /** + * The maximum concurrent connection count for the source data store. Type: integer (or Expression with resultType integer). + */ + maxConcurrentConnections?: any; +} + +/** + * A copy activity sink. 
+ */ +export interface CopySink { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "DelimitedTextSink" + | "JsonSink" + | "OrcSink" + | "AzurePostgreSqlSink" + | "AzureMySqlSink" + | "SapCloudForCustomerSink" + | "AzureQueueSink" + | "AzureTableSink" + | "AvroSink" + | "ParquetSink" + | "BinarySink" + | "BlobSink" + | "FileSystemSink" + | "DocumentDbCollectionSink" + | "CosmosDbSqlApiSink" + | "SqlSink" + | "SqlServerSink" + | "AzureSqlSink" + | "SqlMISink" + | "SqlDWSink" + | "OracleSink" + | "AzureDataLakeStoreSink" + | "AzureBlobFSSink" + | "AzureSearchIndexSink" + | "OdbcSink" + | "InformixSink" + | "MicrosoftAccessSink" + | "DynamicsSink" + | "DynamicsCrmSink" + | "CommonDataServiceForAppsSink" + | "AzureDataExplorerSink" + | "SalesforceSink" + | "SalesforceServiceCloudSink" + | "CosmosDbMongoDbApiSink"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Write batch size. Type: integer (or Expression with resultType integer), minimum: 0. + */ + writeBatchSize?: any; + /** + * Write batch timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + writeBatchTimeout?: any; + /** + * Sink retry count. Type: integer (or Expression with resultType integer). + */ + sinkRetryCount?: any; + /** + * Sink retry wait. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + sinkRetryWait?: any; + /** + * The maximum concurrent connection count for the sink data store. Type: integer (or Expression with resultType integer). + */ + maxConcurrentConnections?: any; +} + +/** + * Staging settings. + */ +export interface StagingSettings { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Staging linked service reference. + */ + linkedServiceName: LinkedServiceReference; + /** + * The path to storage for storing the interim data. Type: string (or Expression with resultType string). + */ + path?: any; + /** + * Specifies whether to use compression when copying data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableCompression?: any; +} + +/** + * Redirect incompatible row settings + */ +export interface RedirectIncompatibleRowSettings { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Name of the Azure Storage, Storage SAS, or Azure Data Lake Store linked service used for redirecting incompatible row. Must be specified if redirectIncompatibleRowSettings is specified. Type: string (or Expression with resultType string). + */ + linkedServiceName: any; + /** + * The path for storing the redirect incompatible row data. Type: string (or Expression with resultType string). + */ + path?: any; +} + +/** + * The settings that will be leveraged for SAP HANA source partitioning. + */ +export interface SapHanaPartitionSettings { + /** + * The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionColumnName?: any; +} + +/** + * The settings that will be leveraged for SAP table source partitioning. 
+ +/** + * The settings that will be leveraged for SAP table source partitioning. + */ +export interface SapTablePartitionSettings { + /** + * The name of the column that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionColumnName?: any; + /** + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionUpperBound?: any; + /** + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionLowerBound?: any; + /** + * The maximum number of partitions the table will be split into. Type: integer (or Expression with resultType integer). + */ + maxPartitionsNumber?: any; +} + +/** + * SQL stored procedure parameter. + */ +export interface StoredProcedureParameter { + /** + * Stored procedure parameter value. Type: string (or Expression with resultType string). + */ + value?: any; + /** + * Stored procedure parameter type. + */ + type?: StoredProcedureParameterType; +} + +/** + * The settings that will be leveraged for Oracle source partitioning. + */ +export interface OraclePartitionSettings { + /** + * Names of the physical partitions of Oracle table. + */ + partitionNames?: any; + /** + * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionColumnName?: any; + /** + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionUpperBound?: any; + /** + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionLowerBound?: any; +} + +/** + * The settings that will be leveraged for Teradata source partitioning. + */ +export interface TeradataPartitionSettings { + /** + * The name of the column that will be used for proceeding range or hash partitioning. Type: string (or Expression with resultType string). + */ + partitionColumnName?: any; + /** + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionUpperBound?: any; + /** + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionLowerBound?: any; +} + +/** + * Cursor methods for Mongodb query + */ +export interface MongoDbCursorMethodsProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Specifies the fields to return in the documents that match the query filter. To return all fields in the matching documents, omit this parameter. Type: string (or Expression with resultType string). + */ + project?: any; + /** + * Specifies the order in which the query returns matching documents. Type: string (or Expression with resultType string). + */ + sort?: any; + /** + * Specifies how many documents to skip and where MongoDB begins returning results. This approach may be useful in implementing paginated results. Type: integer (or Expression with resultType integer). 
+ */ + skip?: any; + /** + * Specifies the maximum number of documents the server returns. limit() is analogous to the LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer). + */ + limit?: any; +} + +/** + * The settings that will be leveraged for Netezza source partitioning. + */ +export interface NetezzaPartitionSettings { + /** + * The name of the column in integer type that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionColumnName?: any; + /** + * The maximum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionUpperBound?: any; + /** + * The minimum value of column specified in partitionColumnName that will be used for proceeding range partitioning. Type: string (or Expression with resultType string). + */ + partitionLowerBound?: any; +} + +/** + * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. + */ +export interface RedshiftUnloadSettings { + /** + * The name of the Amazon S3 linked service which will be used for the unload operation when copying from the Amazon Redshift source. + */ + s3LinkedServiceName: LinkedServiceReference; + /** + * The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type: string (or Expression with resultType string). + */ + bucketName: any; +} + +/** + * PolyBase settings. + */ +export interface PolybaseSettings { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Reject type. + */ + rejectType?: PolybaseSettingsRejectType; + /** + * Specifies the value or the percentage of rows that can be rejected before the query fails. Type: number (or Expression with resultType number), minimum: 0. + */ + rejectValue?: any; + /** + * Determines the number of rows to attempt to retrieve before PolyBase recalculates the percentage of rejected rows. Type: integer (or Expression with resultType integer), minimum: 0. + */ + rejectSampleValue?: any; + /** + * Specifies how to handle missing values in delimited text files when PolyBase retrieves data from the text file. Type: boolean (or Expression with resultType boolean). + */ + useTypeDefault?: any; +} + +/** + * DW Copy Command settings. + */ +export interface DWCopyCommandSettings { + /** + * Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). + */ + defaultValues?: DWCopyCommandDefaultValue[]; + /** + * Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" } + */ + additionalOptions?: { [propertyName: string]: string }; +} + +/** + * Default value. + */ +export interface DWCopyCommandDefaultValue { + /** + * Column name. Type: object (or Expression with resultType string). 
+ */ + columnName?: any; + /** + * The default value of the column. Type: object (or Expression with resultType string). + */ + defaultValue?: any; +} + +/** + * Log storage settings. + */ +export interface LogStorageSettings { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Log storage linked service reference. + */ + linkedServiceName: LinkedServiceReference; + /** + * The path to storage for storing detailed logs of activity execution. Type: string (or Expression with resultType string). + */ + path?: any; +} + +/** + * A copy activity translator. + */ +export interface CopyTranslator { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "TabularTranslator"; + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; +} + +/** + * SSIS package location. + */ +export interface SsisPackageLocation { + /** + * The SSIS package path. Type: string (or Expression with resultType string). + */ + packagePath?: any; + /** + * The type of SSIS package location. + */ + type?: SsisPackageLocationType; + /** + * Password of the package. + */ + packagePassword?: SecretBaseUnion; + /** + * The package access credential. + */ + accessCredential?: SsisAccessCredential; + /** + * The configuration file of the package execution. Type: string (or Expression with resultType string). + */ + configurationPath?: any; + /** + * The package name. + */ + packageName?: string; + /** + * The embedded package content. Type: string (or Expression with resultType string). + */ + packageContent?: any; + /** + * The embedded package last modified date. + */ + packageLastModifiedDate?: string; + /** + * The embedded child package list. + */ + childPackages?: SsisChildPackage[]; +} + +/** + * SSIS access credential. + */ +export interface SsisAccessCredential { + /** + * Domain for Windows authentication. + */ + domain: any; + /** + * UserName for Windows authentication. + */ + userName: any; + /** + * Password for Windows authentication. + */ + password: SecretBaseUnion; +} + +/** + * SSIS embedded child package. + */ +export interface SsisChildPackage { + /** + * Path for embedded child package. Type: string (or Expression with resultType string). + */ + packagePath: any; + /** + * Name for embedded child package. + */ + packageName?: string; + /** + * Content for embedded child package. Type: string (or Expression with resultType string). + */ + packageContent: any; + /** + * Last modified date for embedded child package. + */ + packageLastModifiedDate?: string; +} + +/** + * SSIS package execution credential. + */ +export interface SsisExecutionCredential { + /** + * Domain for Windows authentication. + */ + domain: any; + /** + * UserName for Windows authentication. + */ + userName: any; + /** + * Password for Windows authentication. + */ + password: SecureString; +} + +/** + * SSIS execution parameter. + */ +export interface SsisExecutionParameter { + /** + * SSIS package execution parameter value. Type: string (or Expression with resultType string). + */ + value: any; +} + +/** + * SSIS property override. + */ +export interface SsisPropertyOverride { + /** + * SSIS package property override value. Type: string (or Expression with resultType string). + */ + value: any; + /** + * Whether SSIS package property override value is sensitive data. Value will be encrypted in SSISDB if it is true. + */ + isSensitive?: boolean; +}
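+
+// Illustrative usage sketch (editor's example, not generated code): a property
+// override whose value is encrypted in SSISDB. The override value shown is a
+// hypothetical placeholder.
+const examplePropertyOverride: SsisPropertyOverride = {
+  value: "Server=tcp:contoso.database.windows.net", // hypothetical value
+  isSensitive: true // stored encrypted in SSISDB
+};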
+ +/** + * SSIS package execution log location + */ +export interface SsisLogLocation { + /** + * The SSIS package execution log path. Type: string (or Expression with resultType string). + */ + logPath: any; + /** + * The type of SSIS log location. + */ + type: SsisLogLocationType; + /** + * The package execution log access credential. + */ + accessCredential?: SsisAccessCredential; + /** + * Specifies the interval to refresh the log. The default interval is 5 minutes. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + logRefreshInterval?: any; +} + +/** + * Reference objects for custom activity + */ +export interface CustomActivityReferenceObject { + /** + * Linked service references. + */ + linkedServices?: LinkedServiceReference[]; + /** + * Dataset references. + */ + datasets?: DatasetReference[]; +} + +/** + * Web activity authentication properties. + */ +export interface WebActivityAuthentication { + /** + * Web activity authentication (Basic/ClientCertificate/MSI) + */ + type: string; + /** + * Base64-encoded contents of a PFX file. + */ + pfx?: SecretBaseUnion; + /** + * Web activity authentication user name for basic authentication. + */ + username?: string; + /** + * Password for the PFX file or basic authentication. + */ + password?: SecretBaseUnion; + /** + * Resource for which Azure Auth token will be requested when using MSI Authentication. + */ + resource?: string; +} + +/** + * Switch cases which have a value and corresponding activities. + */ +export interface SwitchCase { + /** + * Expected value that satisfies the expression result of the 'on' property. + */ + value?: string; + /** + * List of activities to execute for satisfied case condition. + */ + activities?: ActivityUnion[]; +} + +/** + * Azure ML WebService Input/Output file + */ +export interface AzureMLWebServiceFile { + /** + * The relative file path, including container name, in the Azure Blob Storage specified by the LinkedService. Type: string (or Expression with resultType string). + */ + filePath: any; + /** + * Reference to an Azure Storage LinkedService, where the Azure ML WebService Input/Output file is located. + */ + linkedServiceName: LinkedServiceReference; +} + +/** + * Compute properties for data flow activity. + */ +export interface ExecuteDataFlowActivityTypePropertiesCompute { + /** + * Compute type of the cluster which will execute data flow job. + */ + computeType?: DataFlowComputeType; + /** + * Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. + */ + coreCount?: number; +} + +/** + * The workflow trigger recurrence. + */ +export interface ScheduleTriggerRecurrence { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The frequency. + */ + frequency?: RecurrenceFrequency; + /** + * The interval. + */ + interval?: number; + /** + * The start time. + */ + startTime?: Date; + /** + * The end time. + */ + endTime?: Date; + /** + * The time zone. + */ + timeZone?: string; + /** + * The recurrence schedule. + */ + schedule?: RecurrenceSchedule; +}
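+
+// Illustrative usage sketch (editor's example, not generated code): a weekly
+// recurrence. RecurrenceFrequency and DayOfWeek are assumed to be string
+// unions accepting values such as "Week" and "Monday".
+const exampleRecurrence: ScheduleTriggerRecurrence = {
+  frequency: "Week",
+  interval: 1, // every week
+  startTime: new Date("2020-12-01T08:00:00Z"),
+  timeZone: "UTC",
+  schedule: { weekDays: ["Monday"], hours: [8], minutes: [0] }
+};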
+ +/** + * The recurrence schedule. + */ +export interface RecurrenceSchedule { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The minutes. + */ + minutes?: number[]; + /** + * The hours. + */ + hours?: number[]; + /** + * The days of the week. + */ + weekDays?: DayOfWeek[]; + /** + * The month days. + */ + monthDays?: number[]; + /** + * The monthly occurrences. + */ + monthlyOccurrences?: RecurrenceScheduleOccurrence[]; +} + +/** + * The recurrence schedule occurrence. + */ +export interface RecurrenceScheduleOccurrence { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The day of the week. + */ + day?: DayOfWeek; + /** + * The occurrence. + */ + occurrence?: number; +} + +/** + * Execution policy for an activity. + */ +export interface RetryPolicy { + /** + * Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0. + */ + count?: any; + /** + * Interval between retries in seconds. Default is 30. + */ + intervalInSeconds?: number; +} + +/** + * Referenced dependency. + */ +export interface DependencyReference { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: + | "TriggerDependencyReference" + | "TumblingWindowTriggerDependencyReference" + | "SelfDependencyTumblingWindowTriggerReference"; +} + +/** + * Trigger reference type. + */ +export interface TriggerReference { + /** + * Trigger reference type. + */ + type: TriggerReferenceType; + /** + * Reference trigger name. + */ + referenceName: string; +} + +/** + * The compute resource properties for managed integration runtime. + */ +export interface IntegrationRuntimeComputeProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities + */ + location?: string; + /** + * The node size requirement for the managed integration runtime. + */ + nodeSize?: string; + /** + * The required number of nodes for managed integration runtime. + */ + numberOfNodes?: number; + /** + * Maximum parallel executions count per node for managed integration runtime. + */ + maxParallelExecutionsPerNode?: number; + /** + * Data flow properties for managed integration runtime. + */ + dataFlowProperties?: IntegrationRuntimeDataFlowProperties; + /** + * VNet properties for managed integration runtime. + */ + vNetProperties?: IntegrationRuntimeVNetProperties; +} + +/** + * Data flow properties for managed integration runtime. + */ +export interface IntegrationRuntimeDataFlowProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Compute type of the cluster which will execute data flow job. + */ + computeType?: DataFlowComputeType; + /** + * Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. + */ + coreCount?: number; + /** + * Time to live (in minutes) setting of the cluster which will execute data flow job. + */ + timeToLive?: number; +} + +/** + * VNet properties for managed integration runtime. + */ +export interface IntegrationRuntimeVNetProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The ID of the VNet that this integration runtime will join. 
+ */ + vNetId?: string; + /** + * The name of the subnet this integration runtime will join. + */ + subnet?: string; + /** + * Resource IDs of the public IP addresses that this integration runtime will use. + */ + publicIPs?: string[]; +} + +/** + * SSIS properties for managed integration runtime. + */ +export interface IntegrationRuntimeSsisProperties { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * Catalog information for managed dedicated integration runtime. + */ + catalogInfo?: IntegrationRuntimeSsisCatalogInfo; + /** + * License type for bringing your own license scenario. + */ + licenseType?: IntegrationRuntimeLicenseType; + /** + * Custom setup script properties for a managed dedicated integration runtime. + */ + customSetupScriptProperties?: IntegrationRuntimeCustomSetupScriptProperties; + /** + * Data proxy properties for a managed dedicated integration runtime. + */ + dataProxyProperties?: IntegrationRuntimeDataProxyProperties; + /** + * The edition for the SSIS Integration Runtime + */ + edition?: IntegrationRuntimeEdition; + /** + * Custom setup without script properties for a SSIS integration runtime. + */ + expressCustomSetupProperties?: CustomSetupBase[]; +} + +/** + * Catalog information for managed dedicated integration runtime. + */ +export interface IntegrationRuntimeSsisCatalogInfo { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The catalog database server URL. + */ + catalogServerEndpoint?: string; + /** + * The administrator user name of catalog database. + */ + catalogAdminUserName?: string; + /** + * The password of the administrator user account of the catalog database. + */ + catalogAdminPassword?: SecureString; + /** + * The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/ + */ + catalogPricingTier?: IntegrationRuntimeSsisCatalogPricingTier; +} + +/** + * Custom setup script properties for a managed dedicated integration runtime. + */ +export interface IntegrationRuntimeCustomSetupScriptProperties { + /** + * The URI of the Azure blob container that contains the custom setup script. + */ + blobContainerUri?: string; + /** + * The SAS token of the Azure blob container. + */ + sasToken?: SecureString; +} + +/** + * Data proxy properties for a managed dedicated integration runtime. + */ +export interface IntegrationRuntimeDataProxyProperties { + /** + * The self-hosted integration runtime reference. + */ + connectVia?: EntityReference; + /** + * The staging linked service reference. + */ + stagingLinkedService?: EntityReference; + /** + * The path to contain the staged data in the Blob storage. + */ + path?: string; +} + +/** + * The entity reference. + */ +export interface EntityReference { + /** + * The type of this referenced entity. + */ + type?: IntegrationRuntimeEntityReferenceType; + /** + * The name of this referenced entity. + */ + referenceName?: string; +} + +/** + * The base definition of the custom setup. + */ +export interface CustomSetupBase { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "undefined"; +} + +/** + * The base definition of a linked integration runtime. 
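+ *
+ * @example Narrowing on the `authorizationType` discriminator (illustrative
+ * editor's sketch):
+ * ```ts
+ * function usesKeyAuthorization(rt: LinkedIntegrationRuntimeType): boolean {
+ *   return rt.authorizationType === "Key";
+ * }
+ * ```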
+ */ +export interface LinkedIntegrationRuntimeType { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + authorizationType: "Key" | "RBAC"; +} + +/** + * The storage account linked service. + */ +export type AzureStorageLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureStorage"; + /** + * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of accountKey in connection string. + */ + accountKey?: AzureKeyVaultSecretReference; + /** + * SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + sasUri?: any; + /** + * The Azure key vault secret reference of sasToken in sas uri. + */ + sasToken?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: string; +}; + +/** + * The Azure Blob Storage linked service. + */ +export type AzureBlobStorageLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobStorage"; + /** + * The connection string. It is mutually exclusive with sasUri, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of accountKey in connection string. + */ + accountKey?: AzureKeyVaultSecretReference; + /** + * SAS URI of the Azure Blob Storage resource. It is mutually exclusive with connectionString, serviceEndpoint property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + sasUri?: any; + /** + * The Azure key vault secret reference of sasToken in sas uri. + */ + sasToken?: AzureKeyVaultSecretReference; + /** + * Blob service endpoint of the Azure Blob Storage resource. It is mutually exclusive with connectionString, sasUri property. + */ + serviceEndpoint?: string; + /** + * The ID of the service principal used to authenticate against Azure Blob Storage. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against Azure Blob Storage. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: string; +};
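+
+// Illustrative usage sketch (editor's example, not generated code): a linked
+// service authenticated via SAS URI. Assumes the LinkedService base properties
+// (description, connectVia, ...) are optional; the URI is a placeholder.
+const exampleBlobLinkedService: AzureBlobStorageLinkedService = {
+  type: "AzureBlobStorage",
+  description: "Blob storage reached through a SAS URI", // optional base property
+  sasUri: "https://contoso.blob.core.windows.net/?sv=..." // hypothetical SAS URI
+};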
+ +/** + * The Azure Table Storage linked service. + */ +export type AzureTableStorageLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureTableStorage"; + /** + * The connection string. It is mutually exclusive with sasUri property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of accountKey in connection string. + */ + accountKey?: AzureKeyVaultSecretReference; + /** + * SAS URI of the Azure Storage resource. It is mutually exclusive with connectionString property. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + sasUri?: any; + /** + * The Azure key vault secret reference of sasToken in sas uri. + */ + sasToken?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: string; +}; + +/** + * Azure SQL Data Warehouse linked service. + */ +export type AzureSqlDWLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSqlDW"; + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The ID of the service principal used to authenticate against Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against Azure SQL Data Warehouse. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * SQL Server linked service. + */ +export type SqlServerLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlServer"; + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The on-premises Windows authentication user name. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * The on-premises Windows authentication password. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Microsoft Azure SQL Database linked service. + */ +export type AzureSqlDatabaseLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSqlDatabase"; + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The ID of the service principal used to authenticate against Azure SQL Database. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against Azure SQL Database. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. 
Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure SQL Managed Instance linked service. + */ +export type AzureSqlMILinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSqlMI"; + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The ID of the service principal used to authenticate against Azure SQL Managed Instance. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against Azure SQL Managed Instance. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Batch linked service. + */ +export type AzureBatchLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBatch"; + /** + * The Azure Batch account name. Type: string (or Expression with resultType string). + */ + accountName: any; + /** + * The Azure Batch account access key. + */ + accessKey?: SecretBaseUnion; + /** + * The Azure Batch URI. Type: string (or Expression with resultType string). + */ + batchUri: any; + /** + * The Azure Batch pool name. Type: string (or Expression with resultType string). + */ + poolName: any; + /** + * The Azure Storage linked service reference. + */ + linkedServiceName: LinkedServiceReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Key Vault linked service. + */ +export type AzureKeyVaultLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureKeyVault"; + /** + * The base URL of the Azure Key Vault. e.g. https://myakv.vault.azure.net Type: string (or Expression with resultType string). + */ + baseUrl: any; +}; + +/** + * Microsoft Azure Cosmos Database (CosmosDB) linked service. + */ +export type CosmosDbLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CosmosDb"; + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The endpoint of the Azure CosmosDB account. Type: string (or Expression with resultType string) + */ + accountEndpoint?: any; + /** + * The name of the database. Type: string (or Expression with resultType string) + */ + database?: any; + /** + * The account key of the Azure CosmosDB account. 
Type: SecureString or AzureKeyVaultSecretReference. + */ + accountKey?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Dynamics linked service. + */ +export type DynamicsLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Dynamics"; + /** + * The deployment type of the Dynamics instance. 'Online' for Dynamics Online and 'OnPremisesWithIfd' for Dynamics on-premises with Ifd. Type: string (or Expression with resultType string). + */ + deploymentType: DynamicsDeploymentType; + /** + * The host name of the on-premises Dynamics server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). + */ + hostName?: string; + /** + * The port of on-premises Dynamics server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: string; + /** + * The URL to the Microsoft Dynamics server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + */ + serviceUri?: string; + /** + * The organization name of the Dynamics instance. The property is required for on-prem and required for online when there is more than one Dynamics instance associated with the user. Type: string (or Expression with resultType string). + */ + organizationName?: string; + /** + * The authentication type to connect to Dynamics server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + */ + authenticationType: DynamicsAuthenticationType; + /** + * User name to access the Dynamics instance. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password to access the Dynamics instance. + */ + password?: SecretBaseUnion; + /** + * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + */ + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + /** + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. + */ + servicePrincipalCredential?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +};
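+
+// Illustrative usage sketch (editor's example, not generated code): a Dynamics
+// Online linked service with Office365 authentication. DynamicsDeploymentType /
+// DynamicsAuthenticationType are assumed to be string unions accepting these
+// values, and the credential shape ({ type: "SecureString", value }) is assumed.
+const exampleDynamicsLinkedService: DynamicsLinkedService = {
+  type: "Dynamics",
+  deploymentType: "Online",
+  serviceUri: "https://contoso.crm.dynamics.com", // hypothetical org URL
+  authenticationType: "Office365",
+  username: "integration@contoso.com",
+  password: { type: "SecureString", value: "<placeholder>" }
+};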
+ +/** + * Dynamics CRM linked service. + */ +export type DynamicsCrmLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsCrm"; + /** + * The deployment type of the Dynamics CRM instance. 'Online' for Dynamics CRM Online and 'OnPremisesWithIfd' for Dynamics CRM on-premises with Ifd. Type: string (or Expression with resultType string). + */ + deploymentType: DynamicsDeploymentType; + /** + * The host name of the on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). + */ + hostName?: any; + /** + * The port of on-premises Dynamics CRM server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * The URL to the Microsoft Dynamics CRM server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + */ + serviceUri?: any; + /** + * The organization name of the Dynamics CRM instance. The property is required for on-prem and required for online when there is more than one Dynamics CRM instance associated with the user. Type: string (or Expression with resultType string). + */ + organizationName?: any; + /** + * The authentication type to connect to Dynamics CRM server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + */ + authenticationType: DynamicsAuthenticationType; + /** + * User name to access the Dynamics CRM instance. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password to access the Dynamics CRM instance. + */ + password?: SecretBaseUnion; + /** + * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + */ + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + /** + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. + */ + servicePrincipalCredential?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +};
+ +/** + * Common Data Service for Apps linked service. + */ +export type CommonDataServiceForAppsLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CommonDataServiceForApps"; + /** + * The deployment type of the Common Data Service for Apps instance. 'Online' for Common Data Service for Apps Online and 'OnPremisesWithIfd' for Common Data Service for Apps on-premises with Ifd. Type: string (or Expression with resultType string). + */ + deploymentType: DynamicsDeploymentType; + /** + * The host name of the on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Type: string (or Expression with resultType string). + */ + hostName?: any; + /** + * The port of on-premises Common Data Service for Apps server. The property is required for on-prem and not allowed for online. Default is 443. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * The URL to the Microsoft Common Data Service for Apps server. The property is required for on-line and not allowed for on-prem. Type: string (or Expression with resultType string). + */ + serviceUri?: any; + /** + * The organization name of the Common Data Service for Apps instance. The property is required for on-prem and required for online when there is more than one Common Data Service for Apps instance associated with the user. Type: string (or Expression with resultType string). + */ + organizationName?: any; + /** + * The authentication type to connect to Common Data Service for Apps server. 'Office365' for online scenario, 'Ifd' for on-premises with Ifd scenario, 'AADServicePrincipal' for Server-To-Server authentication in online scenario. Type: string (or Expression with resultType string). + */ + authenticationType: DynamicsAuthenticationType; + /** + * User name to access the Common Data Service for Apps instance. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password to access the Common Data Service for Apps instance. + */ + password?: SecretBaseUnion; + /** + * The client ID of the application in Azure Active Directory used for Server-To-Server authentication. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The service principal credential type to use in Server-To-Server authentication. 'ServicePrincipalKey' for key/secret, 'ServicePrincipalCert' for certificate. Type: string (or Expression with resultType string). + */ + servicePrincipalCredentialType?: DynamicsServicePrincipalCredentialType; + /** + * The credential of the service principal object in Azure Active Directory. If servicePrincipalCredentialType is 'ServicePrincipalKey', servicePrincipalCredential can be SecureString or AzureKeyVaultSecretReference. If servicePrincipalCredentialType is 'ServicePrincipalCert', servicePrincipalCredential can only be AzureKeyVaultSecretReference. + */ + servicePrincipalCredential?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * HDInsight linked service. + */ +export type HDInsightLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HDInsight"; + /** + * HDInsight cluster URI. Type: string (or Expression with resultType string). + */ + clusterUri: any; + /** + * HDInsight cluster user name. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * HDInsight cluster password. + */ + password?: SecretBaseUnion; + /** + * The Azure Storage linked service reference. + */ + linkedServiceName?: LinkedServiceReference; + /** + * A reference to the Azure SQL linked service that points to the HCatalog database. 
+ */ + hcatalogLinkedServiceName?: LinkedServiceReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * Specify if the HDInsight is created with ESP (Enterprise Security Package). Type: Boolean. + */ + isEspEnabled?: any; + /** + * Specify the FileSystem if the main storage for the HDInsight is ADLS Gen2. Type: string (or Expression with resultType string). + */ + fileSystem?: any; +}; + +/** + * File system linked service. + */ +export type FileServerLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "FileServer"; + /** + * Host name of the server. Type: string (or Expression with resultType string). + */ + host: any; + /** + * User ID to log on to the server. Type: string (or Expression with resultType string). + */ + userId?: any; + /** + * Password to log on to the server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure File Storage linked service. + */ +export type AzureFileStorageLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureFileStorage"; + /** + * Host name of the server. Type: string (or Expression with resultType string). + */ + host: any; + /** + * User ID to log on to the server. Type: string (or Expression with resultType string). + */ + userId?: any; + /** + * Password to log on to the server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Google Cloud Storage. + */ +export type GoogleCloudStorageLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GoogleCloudStorage"; + /** + * The access key identifier of the Google Cloud Storage Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). + */ + accessKeyId?: any; + /** + * The secret access key of the Google Cloud Storage Identity and Access Management (IAM) user. + */ + secretAccessKey?: SecretBaseUnion; + /** + * This value specifies the endpoint to access with the Google Cloud Storage Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). + */ + serviceUrl?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +};
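+
+// Illustrative usage sketch (editor's example, not generated code): an Azure
+// File Storage linked service with basic credentials. The host format and the
+// SecureString shape ({ type: "SecureString", value }) are assumptions.
+const exampleFileStorageLinkedService: AzureFileStorageLinkedService = {
+  type: "AzureFileStorage",
+  host: "\\\\contoso-files.file.core.windows.net\\share", // hypothetical UNC host
+  userId: "AZURE\\contoso-files",
+  password: { type: "SecureString", value: "<placeholder>" }
+};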
+ +/** + * Oracle database. + */ +export type OracleLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Oracle"; + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure MySQL database linked service. + */ +export type AzureMySqlLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMySql"; + /** + * The connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for MySQL data source. + */ +export type MySqlLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MySql"; + /** + * The connection string. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for PostgreSQL data source. + */ +export type PostgreSqlLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "PostgreSql"; + /** + * The connection string. + */ + connectionString: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Sybase data source. + */ +export type SybaseLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Sybase"; + /** + * Server name for connection. Type: string (or Expression with resultType string). + */ + server: any; + /** + * Database name for connection. Type: string (or Expression with resultType string). + */ + database: any; + /** + * Schema name for connection. Type: string (or Expression with resultType string). + */ + schema?: any; + /** + * AuthenticationType to be used for connection. + */ + authenticationType?: SybaseAuthenticationType; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +};
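+
+// Illustrative usage sketch (editor's example, not generated code): a MySQL-
+// family linked service whose password is resolved from Azure Key Vault. The
+// AzureKeyVaultSecretReference shape ({ type, store, secretName }) and the
+// names used are assumptions for demonstration.
+const exampleAzureMySqlLinkedService: AzureMySqlLinkedService = {
+  type: "AzureMySql",
+  connectionString: "Server=contoso.mysql.database.azure.com;Database=app", // hypothetical
+  password: {
+    type: "AzureKeyVaultSecret",
+    store: { type: "LinkedServiceReference", referenceName: "AkvLinkedService" },
+    secretName: "mysqlPassword"
+  }
+};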
+ +/** + * Linked service for DB2 data source. + */ +export type Db2LinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Db2"; + /** + * Server name for connection. Type: string (or Expression with resultType string). + */ + server: any; + /** + * Database name for connection. Type: string (or Expression with resultType string). + */ + database: any; + /** + * AuthenticationType to be used for connection. + */ + authenticationType?: Db2AuthenticationType; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication. + */ + password?: SecretBaseUnion; + /** + * Under where packages are created when querying the database. Type: string (or Expression with resultType string). + */ + packageCollection?: any; + /** + * Certificate Common Name when TLS is enabled. Type: string (or Expression with resultType string). + */ + certificateCommonName?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Teradata data source. + */ +export type TeradataLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Teradata"; + /** + * Teradata ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * Server name for connection. Type: string (or Expression with resultType string). + */ + server?: any; + /** + * AuthenticationType to be used for connection. + */ + authenticationType?: TeradataAuthenticationType; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure ML Studio Web Service linked service. + */ +export type AzureMLLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureML"; + /** + * The Batch Execution REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). + */ + mlEndpoint: any; + /** + * The API key for accessing the Azure ML model endpoint. + */ + apiKey: SecretBaseUnion; + /** + * The Update Resource REST URL for an Azure ML Studio Web Service endpoint. Type: string (or Expression with resultType string). + */ + updateResourceEndpoint?: any; + /** + * The ID of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against the ARM-based updateResourceEndpoint of an Azure ML Studio web service. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure ML Service linked service. + */ +export type AzureMLServiceLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMLService"; + /** + * Azure ML Service workspace subscription ID. Type: string (or Expression with resultType string). + */ + subscriptionId: any; + /** + * Azure ML Service workspace resource group name. Type: string (or Expression with resultType string). + */ + resourceGroupName: any; + /** + * Azure ML Service workspace name. Type: string (or Expression with resultType string). + */ + mlWorkspaceName: any; + /** + * The ID of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the service principal used to authenticate against the endpoint of a published Azure ML Service pipeline. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Open Database Connectivity (ODBC) linked service. + */ +export type OdbcLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Odbc"; + /** + * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * Type of authentication used to connect to the ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The access credential portion of the connection string specified in driver-specific property-value format. + */ + credential?: SecretBaseUnion; + /** + * User name for Basic authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Informix linked service. + */ +export type InformixLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Informix"; + /** + * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * Type of authentication used to connect to the Informix as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The access credential portion of the connection string specified in driver-specific property-value format. 
+ */ + credential?: SecretBaseUnion; + /** + * User name for Basic authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Microsoft Access linked service. + */ +export type MicrosoftAccessLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MicrosoftAccess"; + /** + * The non-access credential portion of the connection string as well as an optional encrypted credential. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * Type of authentication used to connect to the Microsoft Access as ODBC data store. Possible values are: Anonymous and Basic. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The access credential portion of the connection string specified in driver-specific property-value format. + */ + credential?: SecretBaseUnion; + /** + * User name for Basic authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Hadoop Distributed File System (HDFS) linked service. + */ +export type HdfsLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Hdfs"; + /** + * The URL of the HDFS service endpoint, e.g. http://myhostname:50070/webhdfs/v1 . Type: string (or Expression with resultType string). + */ + url: any; + /** + * Type of authentication used to connect to the HDFS. Possible values are: Anonymous and Windows. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * User name for Windows authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Windows authentication. + */ + password?: SecretBaseUnion; +}; + +/** + * Open Data Protocol (OData) linked service. + */ +export type ODataLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OData"; + /** + * The URL of the OData service endpoint. Type: string (or Expression with resultType string). + */ + url: any; + /** + * Type of authentication used to connect to the OData service. + */ + authenticationType?: ODataAuthenticationType; + /** + * User name of the OData service. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password of the OData service. + */ + password?: SecretBaseUnion; + /** + * Specify the tenant information (domain name or tenant ID) under which your application resides. Type: string (or Expression with resultType string). 
+ */ + tenant?: any; + /** + * Specify the application ID of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * Specify the resource you are requesting authorization to use. Type: string (or Expression with resultType string). + */ + aadResourceId?: any; + /** + * Specify the credential type (key or cert) that is used for the service principal. + */ + aadServicePrincipalCredentialType?: ODataAadServicePrincipalCredentialType; + /** + * Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * Specify the base64 encoded certificate of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). + */ + servicePrincipalEmbeddedCert?: SecretBaseUnion; + /** + * Specify the password of your certificate if your certificate has a password and you are using AadServicePrincipal authentication. Type: string (or Expression with resultType string). + */ + servicePrincipalEmbeddedCertPassword?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Web linked service. + */ +export type WebLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Web"; + /** + * Web linked service properties. + */ + typeProperties: WebLinkedServiceTypePropertiesUnion; +}; + +/** + * Linked service for Cassandra data source. + */ +export type CassandraLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Cassandra"; + /** + * Host name for connection. Type: string (or Expression with resultType string). + */ + host: any; + /** + * AuthenticationType to be used for connection. Type: string (or Expression with resultType string). + */ + authenticationType?: any; + /** + * The port for the connection. Type: integer (or Expression with resultType integer). + */ + port?: any; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for MongoDb data source. + */ +export type MongoDbLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MongoDb"; + /** + * The IP address or server name of the MongoDB server. Type: string (or Expression with resultType string). + */ + server: any; + /** + * The authentication type to be used to connect to the MongoDB database. + */ + authenticationType?: MongoDbAuthenticationType; + /** + * The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). + */ + databaseName: any; + /** + * Username for authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * Password for authentication.
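+ *
+ * Illustrative sketch (assumption): the OData linked service above with service-principal properties; the IDs are placeholders, and the credential-type value assumes "ServicePrincipalKey" is a member of ODataAadServicePrincipalCredentialType.
+ *
+ *   const odataService: ODataLinkedService = {
+ *     type: "OData",
+ *     url: "https://example.com/odata",                     // hypothetical endpoint
+ *     servicePrincipalId: "<app-id>",
+ *     tenant: "<tenant-id>",
+ *     aadServicePrincipalCredentialType: "ServicePrincipalKey",
+ *     servicePrincipalKey: { type: "SecureString", value: "<secret>" }
+ *   };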
+ */ + password?: SecretBaseUnion; + /** + * Database to verify the username and password. Type: string (or Expression with resultType string). + */ + authSource?: any; + /** + * The TCP port number that the MongoDB server uses to listen for client connections. The default value is 27017. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableSsl?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. Type: boolean (or Expression with resultType boolean). + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for MongoDB data source. + */ +export type MongoDbV2LinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MongoDbV2"; + /** + * The MongoDB connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). + */ + database: any; +}; + +/** + * Linked service for CosmosDB (MongoDB API) data source. + */ +export type CosmosDbMongoDbApiLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CosmosDbMongoDbApi"; + /** + * The CosmosDB (MongoDB API) connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString: any; + /** + * The name of the CosmosDB (MongoDB API) database that you want to access. Type: string (or Expression with resultType string). + */ + database: any; +}; + +/** + * Azure Data Lake Store linked service. + */ +export type AzureDataLakeStoreLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataLakeStore"; + /** + * Data Lake Store service URI. Type: string (or Expression with resultType string). + */ + dataLakeStoreUri: any; + /** + * The ID of the application used to authenticate against the Azure Data Lake Store account. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The Key of the application used to authenticate against the Azure Data Lake Store account. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * Data Lake Store account name. Type: string (or Expression with resultType string). + */ + accountName?: any; + /** + * Data Lake Store account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). + */ + subscriptionId?: any; + /** + * Data Lake Store account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string).
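+ *
+ * Illustrative sketch (assumption): the MongoDbV2 linked service above needs only a connection string and database name; both values are placeholders.
+ *
+ *   const mongoService: MongoDbV2LinkedService = {
+ *     type: "MongoDbV2",
+ *     connectionString: "mongodb://user:pass@host:27017",   // hypothetical
+ *     database: "mydb"                                      // hypothetical
+ *   };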
+ */ + resourceGroupName?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Data Lake Storage Gen2 linked service. + */ +export type AzureBlobFSLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobFS"; + /** + * Endpoint for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). + */ + url: any; + /** + * Account key for the Azure Data Lake Storage Gen2 service. Type: string (or Expression with resultType string). + */ + accountKey?: any; + /** + * The ID of the application used to authenticate against the Azure Data Lake Storage Gen2 account. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The Key of the application used to authenticate against the Azure Data Lake Storage Gen2 account. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Office365 linked service. + */ +export type Office365LinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Office365"; + /** + * Azure tenant ID to which the Office 365 account belongs. Type: string (or Expression with resultType string). + */ + office365TenantId: any; + /** + * Specify the tenant information under which your Azure AD web application resides. Type: string (or Expression with resultType string). + */ + servicePrincipalTenantId: any; + /** + * Specify the application's client ID. Type: string (or Expression with resultType string). + */ + servicePrincipalId: any; + /** + * Specify the application's key. + */ + servicePrincipalKey: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Salesforce. + */ +export type SalesforceLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Salesforce"; + /** + * The URL of Salesforce instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + */ + environmentUrl?: any; + /** + * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * The password for Basic authentication of the Salesforce instance. + */ + password?: SecretBaseUnion; + /** + * The security token is required to remotely access Salesforce instance. + */ + securityToken?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. 
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Salesforce Service Cloud. + */ +export type SalesforceServiceCloudLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceServiceCloud"; + /** + * The URL of Salesforce Service Cloud instance. Default is 'https://login.salesforce.com'. To copy data from sandbox, specify 'https://test.salesforce.com'. To copy data from custom domain, specify, for example, 'https://[domain].my.salesforce.com'. Type: string (or Expression with resultType string). + */ + environmentUrl?: any; + /** + * The username for Basic authentication of the Salesforce instance. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * The password for Basic authentication of the Salesforce instance. + */ + password?: SecretBaseUnion; + /** + * The security token is required to remotely access Salesforce instance. + */ + securityToken?: SecretBaseUnion; + /** + * Extended properties appended to the connection string. Type: string (or Expression with resultType string). + */ + extendedProperties?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for SAP Cloud for Customer. + */ +export type SapCloudForCustomerLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapCloudForCustomer"; + /** + * The URL of SAP Cloud for Customer OData API. For example, '[https://[tenantname].crm.ondemand.com/sap/c4c/odata/v1]'. Type: string (or Expression with resultType string). + */ + url: any; + /** + * The username for Basic authentication. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * The password for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for SAP ERP Central Component(SAP ECC). + */ +export type SapEccLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapEcc"; + /** + * The URL of SAP ECC OData API. For example, '[https://hostname:port/sap/opu/odata/sap/servicename/]'. Type: string (or Expression with resultType string). + */ + url: string; + /** + * The username for Basic authentication. Type: string (or Expression with resultType string). + */ + username?: string; + /** + * The password for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Either encryptedCredential or username/password must be provided. Type: string (or Expression with resultType string). + */ + encryptedCredential?: string; +}; + +/** + * SAP Business Warehouse Open Hub Destination Linked Service. 
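+ *
+ * Illustrative sketch (assumption): the minimal required properties for this type, with placeholder values.
+ *
+ *   const sapOpenHub: SapOpenHubLinkedService = {
+ *     type: "SapOpenHub",
+ *     server: "sapbw.example.com",                          // hypothetical host
+ *     systemNumber: "00",
+ *     clientId: "100"
+ *   };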
+ */ +export type SapOpenHubLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapOpenHub"; + /** + * Host name of the SAP BW instance where the open hub destination is located. Type: string (or Expression with resultType string). + */ + server: any; + /** + * System number of the BW system where the open hub destination is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). + */ + systemNumber: any; + /** + * Client ID of the client on the BW system where the open hub destination is located. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). + */ + clientId: any; + /** + * Language of the BW system where the open hub destination is located. The default value is EN. Type: string (or Expression with resultType string). + */ + language?: any; + /** + * Username to access the SAP BW server where the open hub destination is located. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to access the SAP BW server where the open hub destination is located. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Rest Service linked service. + */ +export type RestServiceLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "RestService"; + /** + * The base URL of the REST service. + */ + url: any; + /** + * Whether to validate server side SSL certificate when connecting to the endpoint. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + enableServerCertificateValidation?: any; + /** + * Type of authentication used to connect to the REST service. + */ + authenticationType: RestServiceAuthenticationType; + /** + * The user name used in Basic authentication type. + */ + userName?: any; + /** + * The password used in Basic authentication type. + */ + password?: SecretBaseUnion; + /** + * The application's client ID used in AadServicePrincipal authentication type. + */ + servicePrincipalId?: any; + /** + * The application's key used in AadServicePrincipal authentication type. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The tenant information (domain name or tenant ID) used in AadServicePrincipal authentication type under which your application resides. + */ + tenant?: any; + /** + * The resource you are requesting authorization to use. + */ + aadResourceId?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Amazon S3. + */ +export type AmazonS3LinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AmazonS3"; + /** + * The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). + */ + accessKeyId?: any; + /** + * The secret access key of the Amazon S3 Identity and Access Management (IAM) user.
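+ *
+ * Illustrative sketch (assumption): the Amazon S3 linked service above with IAM keys; the secret is wrapped as a SecureString, and both key values are placeholders.
+ *
+ *   const s3Service: AmazonS3LinkedService = {
+ *     type: "AmazonS3",
+ *     accessKeyId: "<access-key-id>",
+ *     secretAccessKey: { type: "SecureString", value: "<secret-key>" }
+ *   };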
+ */ + secretAccessKey?: SecretBaseUnion; + /** + * This value specifies the endpoint to access with the S3 Connector. This is an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). + */ + serviceUrl?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for Amazon Redshift. + */ +export type AmazonRedshiftLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AmazonRedshift"; + /** + * The name of the Amazon Redshift server. Type: string (or Expression with resultType string). + */ + server: any; + /** + * The username of the Amazon Redshift source. Type: string (or Expression with resultType string). + */ + username?: any; + /** + * The password of the Amazon Redshift source. + */ + password?: SecretBaseUnion; + /** + * The database name of the Amazon Redshift source. Type: string (or Expression with resultType string). + */ + database: any; + /** + * The TCP port number that the Amazon Redshift server uses to listen for client connections. The default value is 5439. Type: integer (or Expression with resultType integer). + */ + port?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Custom linked service. + */ +export type CustomDataSourceLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CustomDataSource"; + /** + * Custom linked service properties. + */ + typeProperties: any; +}; + +/** + * Linked service for Windows Azure Search Service. + */ +export type AzureSearchLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSearch"; + /** + * URL for Azure Search service. Type: string (or Expression with resultType string). + */ + url: any; + /** + * Admin Key for Azure Search service + */ + key?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Linked service for an HTTP source. + */ +export type HttpLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HttpServer"; + /** + * The base URL of the HTTP endpoint, e.g. http://www.microsoft.com. Type: string (or Expression with resultType string). + */ + url: any; + /** + * The authentication type to be used to connect to the HTTP server. + */ + authenticationType?: HttpAuthenticationType; + /** + * User name for Basic, Digest, or Windows authentication. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password for Basic, Digest, Windows, or ClientCertificate with EmbeddedCertData authentication. + */ + password?: SecretBaseUnion; + /** + * Base64 encoded certificate data for ClientCertificate authentication. 
For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). + */ + embeddedCertData?: any; + /** + * Thumbprint of certificate for ClientCertificate authentication. Only valid for on-premises copy. For on-premises copy with ClientCertificate authentication, either CertThumbprint or EmbeddedCertData/Password should be specified. Type: string (or Expression with resultType string). + */ + certThumbprint?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * If true, validate the HTTPS server SSL certificate. Default value is true. Type: boolean (or Expression with resultType boolean). + */ + enableServerCertificateValidation?: any; +}; + +/** + * An FTP server Linked Service. + */ +export type FtpServerLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "FtpServer"; + /** + * Host name of the FTP server. Type: string (or Expression with resultType string). + */ + host: any; + /** + * The TCP port number that the FTP server uses to listen for client connections. Default value is 21. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * The authentication type to be used to connect to the FTP server. + */ + authenticationType?: FtpAuthenticationType; + /** + * Username to log on to the FTP server. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to log on to the FTP server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * If true, connect to the FTP server over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). + */ + enableSsl?: any; + /** + * If true, validate the FTP server SSL certificate when connecting over SSL/TLS channel. Default value is true. Type: boolean (or Expression with resultType boolean). + */ + enableServerCertificateValidation?: any; +}; + +/** + * A linked service for an SSH File Transfer Protocol (SFTP) server. + */ +export type SftpServerLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Sftp"; + /** + * The SFTP server host name. Type: string (or Expression with resultType string). + */ + host: any; + /** + * The TCP port number that the SFTP server uses to listen for client connections. Default value is 22. Type: integer (or Expression with resultType integer), minimum: 0. + */ + port?: any; + /** + * The authentication type to be used to connect to the SFTP server. + */ + authenticationType?: SftpAuthenticationType; + /** + * The username used to log on to the SFTP server. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to log on to the SFTP server for Basic authentication. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
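+ *
+ * Illustrative sketch (assumption): the SFTP linked service above with SshPublicKey authentication and an inline private key; the host, user and key are placeholders, and "SshPublicKey" assumes that member of SftpAuthenticationType.
+ *
+ *   const sftpService: SftpServerLinkedService = {
+ *     type: "Sftp",
+ *     host: "sftp.example.com",                             // hypothetical host
+ *     port: 22,
+ *     authenticationType: "SshPublicKey",
+ *     userName: "deploy",                                   // hypothetical user
+ *     privateKeyContent: { type: "SecureString", value: "<base64-openssh-key>" }
+ *   };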
+ */ + encryptedCredential?: any; + /** + * The SSH private key file path for SshPublicKey authentication. Only valid for on-premises copy. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. Type: string (or Expression with resultType string). + */ + privateKeyPath?: any; + /** + * Base64 encoded SSH private key content for SshPublicKey authentication. For on-premises copy with SshPublicKey authentication, either PrivateKeyPath or PrivateKeyContent should be specified. SSH private key should be OpenSSH format. + */ + privateKeyContent?: SecretBaseUnion; + /** + * The password to decrypt the SSH private key if the SSH private key is encrypted. + */ + passPhrase?: SecretBaseUnion; + /** + * If true, skip the SSH host key validation. Default value is false. Type: boolean (or Expression with resultType boolean). + */ + skipHostKeyValidation?: any; + /** + * The host key finger-print of the SFTP server. When SkipHostKeyValidation is false, HostKeyFingerprint should be specified. Type: string (or Expression with resultType string). + */ + hostKeyFingerprint?: any; +}; + +/** + * SAP Business Warehouse Linked Service. + */ +export type SapBWLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapBW"; + /** + * Host name of the SAP BW instance. Type: string (or Expression with resultType string). + */ + server: any; + /** + * System number of the BW system. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). + */ + systemNumber: any; + /** + * Client ID of the client on the BW system. (Usually a three-digit decimal number represented as a string) Type: string (or Expression with resultType string). + */ + clientId: any; + /** + * Username to access the SAP BW server. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to access the SAP BW server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * SAP HANA Linked Service. + */ +export type SapHanaLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapHana"; + /** + * SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * Host name of the SAP HANA server. Type: string (or Expression with resultType string). + */ + server: any; + /** + * The authentication type to be used to connect to the SAP HANA server. + */ + authenticationType?: SapHanaAuthenticationType; + /** + * Username to access the SAP HANA server. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to access the SAP HANA server. + */ + password?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Amazon Marketplace Web Service linked service. 
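+ *
+ * Illustrative sketch (assumption): the minimal required properties for this type; the IDs are placeholders, and the endpoint and marketplace values are the examples given in the property descriptions below.
+ *
+ *   const mwsService: AmazonMWSLinkedService = {
+ *     type: "AmazonMWS",
+ *     endpoint: "mws.amazonservices.com",
+ *     marketplaceID: "A2EUQ1WTGCTBG2",
+ *     sellerID: "<seller-id>",
+ *     accessKeyId: "<access-key-id>"
+ *   };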
+ */ +export type AmazonMWSLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AmazonMWS"; + /** + * The endpoint of the Amazon MWS server, (i.e. mws.amazonservices.com) + */ + endpoint: any; + /** + * The Amazon Marketplace ID you want to retrieve data from. To retrieve data from multiple Marketplace IDs, separate them with a comma (,). (i.e. A2EUQ1WTGCTBG2) + */ + marketplaceID: any; + /** + * The Amazon seller ID. + */ + sellerID: any; + /** + * The Amazon MWS authentication token. + */ + mwsAuthToken?: SecretBaseUnion; + /** + * The access key id used to access data. + */ + accessKeyId: any; + /** + * The secret key used to access data. + */ + secretKey?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure PostgreSQL linked service. + */ +export type AzurePostgreSqlLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzurePostgreSql"; + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + password?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Concur Service linked service. + */ +export type ConcurLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Concur"; + /** + * Application client_id supplied by Concur App Management. + */ + clientId: any; + /** + * The user name that you use to access Concur Service. + */ + username: any; + /** + * The password corresponding to the user name that you provided in the username field. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Couchbase server linked service. 
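+ *
+ * Illustrative sketch (assumption): this type pairs an ODBC connection string with an optional Azure Key Vault reference; the store and secret names are placeholders, and the reference shape assumes the AzureKeyVaultSecretReference and LinkedServiceReference models.
+ *
+ *   const couchbaseService: CouchbaseLinkedService = {
+ *     type: "Couchbase",
+ *     connectionString: "Server=cb.example.com;Port=8091",  // hypothetical
+ *     credString: {
+ *       type: "AzureKeyVaultSecret",
+ *       store: { type: "LinkedServiceReference", referenceName: "myKeyVault" },
+ *       secretName: "couchbase-credstring"
+ *     }
+ *   };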
+ */ +export type CouchbaseLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Couchbase"; + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of credString in connection string. + */ + credString?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Drill server linked service. + */ +export type DrillLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Drill"; + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Eloqua server linked service. + */ +export type EloquaLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Eloqua"; + /** + * The endpoint of the Eloqua server. (i.e. eloqua.example.com) + */ + endpoint: any; + /** + * The site name and user name of your Eloqua account in the form: sitename/username. (i.e. Eloqua/Alice) + */ + username: any; + /** + * The password corresponding to the user name. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Google BigQuery service linked service. + */ +export type GoogleBigQueryLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GoogleBigQuery"; + /** + * The default BigQuery project to query against. + */ + project: any; + /** + * A comma-separated list of public BigQuery projects to access. + */ + additionalProjects?: any; + /** + * Whether to request access to Google Drive. Allowing Google Drive access enables support for federated tables that combine BigQuery data with data from Google Drive. The default value is false. + */ + requestGoogleDriveScope?: any; + /** + * The OAuth 2.0 authentication mechanism used for authentication. ServiceAuthentication can only be used on self-hosted IR. 
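+ *
+ * Illustrative sketch (assumption): ServiceAuthentication on a self-hosted IR uses the service-account properties below; the project, email and key path are placeholders.
+ *
+ *   const bigQueryService: GoogleBigQueryLinkedService = {
+ *     type: "GoogleBigQuery",
+ *     project: "my-project",                                // hypothetical project
+ *     authenticationType: "ServiceAuthentication",
+ *     email: "svc@my-project.iam.gserviceaccount.com",
+ *     keyFilePath: "C:\\keys\\bq.p12"
+ *   };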
+ */ + authenticationType: GoogleBigQueryAuthenticationType; + /** + * The refresh token obtained from Google for authorizing access to BigQuery for UserAuthentication. + */ + refreshToken?: SecretBaseUnion; + /** + * The client id of the google application used to acquire the refresh token. Type: string (or Expression with resultType string). + */ + clientId?: any; + /** + * The client secret of the google application used to acquire the refresh token. + */ + clientSecret?: SecretBaseUnion; + /** + * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. + */ + email?: any; + /** + * The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. + */ + keyFilePath?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Greenplum Database linked service. + */ +export type GreenplumLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Greenplum"; + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * HBase server linked service. + */ +export type HBaseLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HBase"; + /** + * The IP address or host name of the HBase server. (i.e. 192.168.222.160) + */ + host: any; + /** + * The TCP port that the HBase instance uses to listen for client connections. The default value is 9090. + */ + port?: any; + /** + * The partial URL corresponding to the HBase server. (i.e. /gateway/sandbox/hbase/version) + */ + httpPath?: any; + /** + * The authentication mechanism to use to connect to the HBase server. + */ + authenticationType: HBaseAuthenticationType; + /** + * The user name used to connect to the HBase instance. + */ + username?: any; + /** + * The password corresponding to the user name. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. 
+ */ + trustedCertPath?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Hive Server linked service. + */ +export type HiveLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Hive"; + /** + * IP address or host name of the Hive server, separated by ';' for multiple hosts (only when serviceDiscoveryMode is enabled). + */ + host: any; + /** + * The TCP port that the Hive server uses to listen for client connections. + */ + port?: any; + /** + * The type of Hive server. + */ + serverType?: HiveServerType; + /** + * The transport protocol to use in the Thrift layer. + */ + thriftTransportProtocol?: HiveThriftTransportProtocol; + /** + * The authentication method used to access the Hive server. + */ + authenticationType: HiveAuthenticationType; + /** + * True to indicate using the ZooKeeper service, false otherwise. + */ + serviceDiscoveryMode?: any; + /** + * The namespace on ZooKeeper under which Hive Server 2 nodes are added. + */ + zooKeeperNameSpace?: any; + /** + * Specifies whether the driver uses native HiveQL queries, or converts them into an equivalent form in HiveQL. + */ + useNativeQuery?: any; + /** + * The user name that you use to access Hive Server. + */ + username?: any; + /** + * The password corresponding to the user name that you provided in the Username field. + */ + password?: SecretBaseUnion; + /** + * The partial URL corresponding to the Hive server. + */ + httpPath?: any; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Hubspot Service linked service. + */ +export type HubspotLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Hubspot"; + /** + * The client ID associated with your Hubspot application.
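+ *
+ * Illustrative sketch (assumption): the Hive linked service defined above with username/password authentication over SSL; the host and credentials are placeholders, and "UsernameAndPassword" assumes that member of HiveAuthenticationType.
+ *
+ *   const hiveService: HiveLinkedService = {
+ *     type: "Hive",
+ *     host: "hive.example.com",                             // hypothetical host
+ *     port: 10000,
+ *     authenticationType: "UsernameAndPassword",
+ *     username: "hiveUser",                                 // hypothetical user
+ *     password: { type: "SecureString", value: "<password>" },
+ *     enableSsl: true
+ *   };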
+ */ + clientId: any; + /** + * The client secret associated with your Hubspot application. + */ + clientSecret?: SecretBaseUnion; + /** + * The access token obtained when initially authenticating your OAuth integration. + */ + accessToken?: SecretBaseUnion; + /** + * The refresh token obtained when initially authenticating your OAuth integration. + */ + refreshToken?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Impala server linked service. + */ +export type ImpalaLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Impala"; + /** + * The IP address or host name of the Impala server. (i.e. 192.168.222.160) + */ + host: any; + /** + * The TCP port that the Impala server uses to listen for client connections. The default value is 21050. + */ + port?: any; + /** + * The authentication type to use. + */ + authenticationType: ImpalaAuthenticationType; + /** + * The user name used to access the Impala server. The default value is anonymous when using SASLUsername. + */ + username?: any; + /** + * The password corresponding to the user name when using UsernameAndPassword. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Jira Service linked service. + */ +export type JiraLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Jira"; + /** + * The IP address or host name of the Jira service. (e.g. jira.example.com) + */ + host: any; + /** + * The TCP port that the Jira server uses to listen for client connections. 
The default value is 443 if connecting through HTTPS, or 8080 if connecting through HTTP. + */ + port?: any; + /** + * The user name that you use to access Jira Service. + */ + username: any; + /** + * The password corresponding to the user name that you provided in the username field. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Magento server linked service. + */ +export type MagentoLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Magento"; + /** + * The URL of the Magento instance. (i.e. 192.168.222.110/magento3) + */ + host: any; + /** + * The access token from Magento. + */ + accessToken?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * MariaDB server linked service. + */ +export type MariaDBLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MariaDB"; + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Database for MariaDB linked service. + */ +export type AzureMariaDBLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMariaDB"; + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure key vault secret reference of password in connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
+ */ + encryptedCredential?: any; +}; + +/** + * Marketo server linked service. + */ +export type MarketoLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Marketo"; + /** + * The endpoint of the Marketo server. (i.e. 123-ABC-321.mktorest.com) + */ + endpoint: any; + /** + * The client Id of your Marketo service. + */ + clientId: any; + /** + * The client secret of your Marketo service. + */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Paypal Service linked service. + */ +export type PaypalLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Paypal"; + /** + * The URL of the PayPal instance. (i.e. api.sandbox.paypal.com) + */ + host: any; + /** + * The client ID associated with your PayPal application. + */ + clientId: any; + /** + * The client secret associated with your PayPal application. + */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Phoenix server linked service. + */ +export type PhoenixLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Phoenix"; + /** + * The IP address or host name of the Phoenix server. (i.e. 192.168.222.160) + */ + host: any; + /** + * The TCP port that the Phoenix server uses to listen for client connections. The default value is 8765. + */ + port?: any; + /** + * The partial URL corresponding to the Phoenix server. (i.e. /gateway/sandbox/phoenix/version). The default value is hbasephoenix if using WindowsAzureHDInsightService. + */ + httpPath?: any; + /** + * The authentication mechanism used to connect to the Phoenix server. + */ + authenticationType: PhoenixAuthenticationType; + /** + * The user name used to connect to the Phoenix server. + */ + username?: any; + /** + * The password corresponding to the user name. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. 
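+ *
+ * Illustrative sketch (assumption): the Marketo linked service defined above with client-credential properties; the endpoint is the example from its description and the IDs are placeholders.
+ *
+ *   const marketoService: MarketoLinkedService = {
+ *     type: "Marketo",
+ *     endpoint: "123-ABC-321.mktorest.com",
+ *     clientId: "<client-id>",
+ *     clientSecret: { type: "SecureString", value: "<client-secret>" }
+ *   };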
+ */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Presto server linked service. + */ +export type PrestoLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Presto"; + /** + * The IP address or host name of the Presto server. (i.e. 192.168.222.160) + */ + host: any; + /** + * The version of the Presto server. (i.e. 0.148-t) + */ + serverVersion: any; + /** + * The catalog context for all requests against the server. + */ + catalog: any; + /** + * The TCP port that the Presto server uses to listen for client connections. The default value is 8080. + */ + port?: any; + /** + * The authentication mechanism used to connect to the Presto server. + */ + authenticationType: PrestoAuthenticationType; + /** + * The user name used to connect to the Presto server. + */ + username?: any; + /** + * The password corresponding to the user name. + */ + password?: SecretBaseUnion; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The local time zone used by the connection. Valid values for this option are specified in the IANA Time Zone Database. The default value is the system time zone. + */ + timeZoneID?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * QuickBooks server linked service.
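+ *
+ * Illustrative sketch (assumption): the six required properties of this type besides the discriminator; every value is a placeholder.
+ *
+ *   const quickBooksService: QuickBooksLinkedService = {
+ *     type: "QuickBooks",
+ *     endpoint: "quickbooks.api.intuit.com",
+ *     companyId: "<company-id>",
+ *     consumerKey: "<consumer-key>",
+ *     consumerSecret: { type: "SecureString", value: "<consumer-secret>" },
+ *     accessToken: { type: "SecureString", value: "<access-token>" },
+ *     accessTokenSecret: { type: "SecureString", value: "<token-secret>" }
+ *   };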
+ */ +export type QuickBooksLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "QuickBooks"; + /** + * The endpoint of the QuickBooks server. (i.e. quickbooks.api.intuit.com) + */ + endpoint: any; + /** + * The company ID of the QuickBooks company to authorize. + */ + companyId: any; + /** + * The consumer key for OAuth 1.0 authentication. + */ + consumerKey: any; + /** + * The consumer secret for OAuth 1.0 authentication. + */ + consumerSecret: SecretBaseUnion; + /** + * The access token for OAuth 1.0 authentication. + */ + accessToken: SecretBaseUnion; + /** + * The access token secret for OAuth 1.0 authentication. + */ + accessTokenSecret: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * ServiceNow server linked service. + */ +export type ServiceNowLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ServiceNow"; + /** + * The endpoint of the ServiceNow server. (i.e. <instance>.service-now.com) + */ + endpoint: any; + /** + * The authentication type to use. + */ + authenticationType: ServiceNowAuthenticationType; + /** + * The user name used to connect to the ServiceNow server for Basic and OAuth2 authentication. + */ + username?: any; + /** + * The password corresponding to the user name for Basic and OAuth2 authentication. + */ + password?: SecretBaseUnion; + /** + * The client id for OAuth2 authentication. + */ + clientId?: any; + /** + * The client secret for OAuth2 authentication. + */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Shopify Service linked service. + */ +export type ShopifyLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Shopify"; + /** + * The endpoint of the Shopify server. (i.e. mystore.myshopify.com) + */ + host: any; + /** + * The API access token that can be used to access Shopify’s data. The token won't expire if it is in offline mode. + */ + accessToken?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true.
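+ *
+ * Illustrative sketch (assumption): the ServiceNow linked service defined above using OAuth2; the instance name and credentials are placeholders, and "OAuth2" assumes that member of ServiceNowAuthenticationType.
+ *
+ *   const serviceNowService: ServiceNowLinkedService = {
+ *     type: "ServiceNow",
+ *     endpoint: "<instance>.service-now.com",
+ *     authenticationType: "OAuth2",
+ *     clientId: "<client-id>",
+ *     clientSecret: { type: "SecureString", value: "<client-secret>" },
+ *     username: "apiUser",                                  // hypothetical user
+ *     password: { type: "SecureString", value: "<password>" }
+ *   };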
+ */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Spark Server linked service. + */ +export type SparkLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Spark"; + /** + * IP address or host name of the Spark server. + */ + host: any; + /** + * The TCP port that the Spark server uses to listen for client connections. + */ + port: any; + /** + * The type of Spark server. + */ + serverType?: SparkServerType; + /** + * The transport protocol to use in the Thrift layer. + */ + thriftTransportProtocol?: SparkThriftTransportProtocol; + /** + * The authentication method used to access the Spark server. + */ + authenticationType: SparkAuthenticationType; + /** + * The user name that you use to access Spark Server. + */ + username?: any; + /** + * The password corresponding to the user name that you provided in the Username field. + */ + password?: SecretBaseUnion; + /** + * The partial URL corresponding to the Spark server. + */ + httpPath?: any; + /** + * Specifies whether the connections to the server are encrypted using SSL. The default value is false. + */ + enableSsl?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * Specifies whether to require a CA-issued SSL certificate name to match the host name of the server when connecting over SSL. The default value is false. + */ + allowHostNameCNMismatch?: any; + /** + * Specifies whether to allow self-signed certificates from the server. The default value is false. + */ + allowSelfSignedServerCert?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Square Service linked service. + */ +export type SquareLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Square"; + /** + * The URL of the Square instance. (e.g. mystore.mysquare.com) + */ + host: any; + /** + * The client ID associated with your Square application. + */ + clientId: any; + /** + * The client secret associated with your Square application. + */ + clientSecret?: SecretBaseUnion; + /** + * The redirect URL assigned in the Square application dashboard. (e.g. http://localhost:2500) + */ + redirectUri: any; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true.
+ */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Xero Service linked service. + */ +export type XeroLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Xero"; + /** + * The endpoint of the Xero server. (e.g. api.xero.com) + */ + host: any; + /** + * The consumer key associated with the Xero application. + */ + consumerKey?: SecretBaseUnion; + /** + * The private key from the .pem file that was generated for your Xero private application. You must include all the text from the .pem file, including the Unix line endings (\n). + */ + privateKey?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Zoho server linked service. + */ +export type ZohoLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Zoho"; + /** + * The endpoint of the Zoho server. (e.g. crm.zoho.com/crm/private) + */ + endpoint: any; + /** + * The access token for Zoho authentication. + */ + accessToken?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Vertica linked service. + */ +export type VerticaLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Vertica"; + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure Key Vault secret reference of the password in the connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string).
+ */ + encryptedCredential?: any; +}; + +/** + * Netezza linked service. + */ +export type NetezzaLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Netezza"; + /** + * An ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. + */ + connectionString?: any; + /** + * The Azure Key Vault secret reference of the password in the connection string. + */ + pwd?: AzureKeyVaultSecretReference; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Salesforce Marketing Cloud linked service. + */ +export type SalesforceMarketingCloudLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceMarketingCloud"; + /** + * The client ID associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). + */ + clientId: any; + /** + * The client secret associated with the Salesforce Marketing Cloud application. Type: string (or Expression with resultType string). + */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * HDInsight on-demand linked service. + */ +export type HDInsightOnDemandLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HDInsightOnDemand"; + /** + * Number of worker/data nodes in the cluster. Suggestion value: 4. Type: string (or Expression with resultType string). + */ + clusterSize: any; + /** + * The allowed idle time for the on-demand HDInsight cluster. Specifies how long the on-demand HDInsight cluster stays alive after completion of an activity run if there are no other active jobs in the cluster. The minimum value is 5 minutes. Type: string (or Expression with resultType string). + */ + timeToLive: any; + /** + * Version of the HDInsight cluster. Type: string (or Expression with resultType string). + */ + version: any; + /** + * Azure Storage linked service to be used by the on-demand cluster for storing and processing data. + */ + linkedServiceName: LinkedServiceReference; + /** + * The customer’s subscription to host the cluster. Type: string (or Expression with resultType string). + */ + hostSubscriptionId: any; + /** + * The service principal ID for the hostSubscriptionId. Type: string (or Expression with resultType string).
+ */ + servicePrincipalId?: any; + /** + * The key for the service principal ID. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The tenant ID/name to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant: any; + /** + * The resource group where the cluster belongs. Type: string (or Expression with resultType string). + */ + clusterResourceGroup: any; + /** + * The prefix of the cluster name; the postfix will be distinct with a timestamp. Type: string (or Expression with resultType string). + */ + clusterNamePrefix?: any; + /** + * The username to access the cluster. Type: string (or Expression with resultType string). + */ + clusterUserName?: any; + /** + * The password to access the cluster. + */ + clusterPassword?: SecretBaseUnion; + /** + * The username to remotely connect to the cluster’s node via SSH (for Linux). Type: string (or Expression with resultType string). + */ + clusterSshUserName?: any; + /** + * The password to remotely connect to the cluster’s node via SSH (for Linux). + */ + clusterSshPassword?: SecretBaseUnion; + /** + * Specifies additional storage accounts for the HDInsight linked service so that the Data Factory service can register them on your behalf. + */ + additionalLinkedServiceNames?: LinkedServiceReference[]; + /** + * The name of the Azure SQL linked service that points to the HCatalog database. The on-demand HDInsight cluster is created by using the Azure SQL database as the metastore. + */ + hcatalogLinkedServiceName?: LinkedServiceReference; + /** + * The cluster type. Type: string (or Expression with resultType string). + */ + clusterType?: any; + /** + * The version of Spark if the cluster type is 'spark'. Type: string (or Expression with resultType string). + */ + sparkVersion?: any; + /** + * Specifies the core configuration parameters (as in core-site.xml) for the HDInsight cluster to be created. + */ + coreConfiguration?: any; + /** + * Specifies the HBase configuration parameters (hbase-site.xml) for the HDInsight cluster. + */ + hBaseConfiguration?: any; + /** + * Specifies the HDFS configuration parameters (hdfs-site.xml) for the HDInsight cluster. + */ + hdfsConfiguration?: any; + /** + * Specifies the Hive configuration parameters (hive-site.xml) for the HDInsight cluster. + */ + hiveConfiguration?: any; + /** + * Specifies the MapReduce configuration parameters (mapred-site.xml) for the HDInsight cluster. + */ + mapReduceConfiguration?: any; + /** + * Specifies the Oozie configuration parameters (oozie-site.xml) for the HDInsight cluster. + */ + oozieConfiguration?: any; + /** + * Specifies the Storm configuration parameters (storm-site.xml) for the HDInsight cluster. + */ + stormConfiguration?: any; + /** + * Specifies the Yarn configuration parameters (yarn-site.xml) for the HDInsight cluster. + */ + yarnConfiguration?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; + /** + * Specifies the size of the head node for the HDInsight cluster. + */ + headNodeSize?: any; + /** + * Specifies the size of the data node for the HDInsight cluster. + */ + dataNodeSize?: any; + /** + * Specifies the size of the ZooKeeper node for the HDInsight cluster. + */ + zookeeperNodeSize?: any; + /** + * Custom script actions to run on the HDI on-demand cluster once it's up.
Please refer to https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux?toc=%2Fen-us%2Fazure%2Fhdinsight%2Fr-server%2FTOC.json&bc=%2Fen-us%2Fazure%2Fbread%2Ftoc.json#understanding-script-actions. + */ + scriptActions?: ScriptAction[]; + /** + * The ARM resource ID for the vNet to which the cluster should be joined after creation. Type: string (or Expression with resultType string). + */ + virtualNetworkId?: any; + /** + * The ARM resource ID for the subnet in the vNet. If virtualNetworkId was specified, then this property is required. Type: string (or Expression with resultType string). + */ + subnetName?: any; +}; + +/** + * Azure Data Lake Analytics linked service. + */ +export type AzureDataLakeAnalyticsLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataLakeAnalytics"; + /** + * The Azure Data Lake Analytics account name. Type: string (or Expression with resultType string). + */ + accountName: any; + /** + * The ID of the application used to authenticate against the Azure Data Lake Analytics account. Type: string (or Expression with resultType string). + */ + servicePrincipalId?: any; + /** + * The key of the application used to authenticate against the Azure Data Lake Analytics account. + */ + servicePrincipalKey?: SecretBaseUnion; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant: any; + /** + * Data Lake Analytics account subscription ID (if different from Data Factory account). Type: string (or Expression with resultType string). + */ + subscriptionId?: any; + /** + * Data Lake Analytics account resource group name (if different from Data Factory account). Type: string (or Expression with resultType string). + */ + resourceGroupName?: any; + /** + * Azure Data Lake Analytics URI. Type: string (or Expression with resultType string). + */ + dataLakeAnalyticsUri?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Databricks linked service. + */ +export type AzureDatabricksLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDatabricks"; + /** + * .azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). + */ + domain: any; + /** + * Access token for the Databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). + */ + accessToken: SecretBaseUnion; + /** + * The ID of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). + */ + existingClusterId?: any; + /** + * The ID of an existing instance pool that will be used for all runs of this activity. Type: string (or Expression with resultType string). + */ + instancePoolId?: any; + /** + * If not using an existing interactive cluster, this specifies the Spark version of a new job cluster or instance pool nodes created for each run of this activity. Required if instancePoolId is specified. Type: string (or Expression with resultType string).
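+ * @example + * // Hedged illustration; both values are assumptions, shown because the version and worker count are typically set together: + * // newClusterVersion: "5.2.x-scala2.11", newClusterNumOfWorker: "1:10"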
+ */ + newClusterVersion?: any; + /** + * If not using an existing interactive cluster, this specifies the number of worker nodes to use for the new job cluster or instance pool. For new job clusters, this is a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto-scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is specified. Type: string (or Expression with resultType string). + */ + newClusterNumOfWorker?: any; + /** + * The node type of the new job cluster. This property is required if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is specified, this property is ignored. Type: string (or Expression with resultType string). + */ + newClusterNodeType?: any; + /** + * A set of optional, user-specified Spark configuration key-value pairs. + */ + newClusterSparkConf?: { [propertyName: string]: any }; + /** + * A set of optional, user-specified Spark environment variable key-value pairs. + */ + newClusterSparkEnvVars?: { [propertyName: string]: any }; + /** + * Additional tags for cluster resources. This property is ignored in instance pool configurations. + */ + newClusterCustomTags?: { [propertyName: string]: any }; + /** + * The driver node type for the new job cluster. This property is ignored in instance pool configurations. Type: string (or Expression with resultType string). + */ + newClusterDriverNodeType?: any; + /** + * User-defined initialization scripts for the new cluster. Type: array of strings (or Expression with resultType array of strings). + */ + newClusterInitScripts?: any; + /** + * Enable the elastic disk on the new cluster. This property is now ignored, and takes the default elastic disk behavior in Databricks (elastic disks are always enabled). Type: boolean (or Expression with resultType boolean). + */ + newClusterEnableElasticDisk?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Responsys linked service. + */ +export type ResponsysLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Responsys"; + /** + * The endpoint of the Responsys server. + */ + endpoint: any; + /** + * The client ID associated with the Responsys application. Type: string (or Expression with resultType string). + */ + clientId: any; + /** + * The client secret associated with the Responsys application. Type: string (or Expression with resultType string). + */ + clientSecret?: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication.
Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Dynamics AX linked service. + */ +export type DynamicsAXLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsAX"; + /** + * The Dynamics AX (or Dynamics 365 Finance and Operations) instance OData endpoint. + */ + url: any; + /** + * Specify the application's client ID. Type: string (or Expression with resultType string). + */ + servicePrincipalId: any; + /** + * Specify the application's key. Mark this field as a SecureString to store it securely in Data Factory, or reference a secret stored in Azure Key Vault. Type: string (or Expression with resultType string). + */ + servicePrincipalKey: SecretBaseUnion; + /** + * Specify the tenant information (domain name or tenant ID) under which your application resides. Retrieve it by hovering the mouse over the top-right corner of the Azure portal. Type: string (or Expression with resultType string). + */ + tenant: any; + /** + * Specify the resource for which you are requesting authorization. Type: string (or Expression with resultType string). + */ + aadResourceId: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Oracle Service Cloud linked service. + */ +export type OracleServiceCloudLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OracleServiceCloud"; + /** + * The URL of the Oracle Service Cloud instance. + */ + host: any; + /** + * The user name that you use to access the Oracle Service Cloud server. + */ + username: any; + /** + * The password corresponding to the user name that you provided in the username key. + */ + password: SecretBaseUnion; + /** + * Specifies whether the data source endpoints are encrypted using HTTPS. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useEncryptedEndpoints?: any; + /** + * Specifies whether to require the host name in the server's certificate to match the host name of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + useHostVerification?: any; + /** + * Specifies whether to verify the identity of the server when connecting over SSL. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + usePeerVerification?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Google AdWords service linked service. + */ +export type GoogleAdWordsLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GoogleAdWords"; + /** + * The Client customer ID of the AdWords account that you want to fetch report data for. + */ + clientCustomerID: any; + /** + * The developer token associated with the manager account that you use to grant access to the AdWords API. + */ + developerToken: SecretBaseUnion; + /** + * The OAuth 2.0 authentication mechanism used for authentication.
ServiceAuthentication can only be used on self-hosted IR. + */ + authenticationType: GoogleAdWordsAuthenticationType; + /** + * The refresh token obtained from Google for authorizing access to AdWords for UserAuthentication. + */ + refreshToken?: SecretBaseUnion; + /** + * The client ID of the Google application used to acquire the refresh token. Type: string (or Expression with resultType string). + */ + clientId?: any; + /** + * The client secret of the Google application used to acquire the refresh token. + */ + clientSecret?: SecretBaseUnion; + /** + * The service account email ID that is used for ServiceAuthentication and can only be used on self-hosted IR. + */ + email?: any; + /** + * The full path to the .p12 key file that is used to authenticate the service account email address and can only be used on self-hosted IR. + */ + keyFilePath?: any; + /** + * The full path of the .pem file containing trusted CA certificates for verifying the server when connecting over SSL. This property can only be set when using SSL on self-hosted IR. The default value is the cacerts.pem file installed with the IR. + */ + trustedCertPath?: any; + /** + * Specifies whether to use a CA certificate from the system trust store or from a specified PEM file. The default value is false. + */ + useSystemTrustStore?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * SAP Table Linked Service. + */ +export type SapTableLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapTable"; + /** + * Host name of the SAP instance where the table is located. Type: string (or Expression with resultType string). + */ + server?: any; + /** + * System number of the SAP system where the table is located. (Usually a two-digit decimal number represented as a string.) Type: string (or Expression with resultType string). + */ + systemNumber?: any; + /** + * Client ID of the client on the SAP system where the table is located. (Usually a three-digit decimal number represented as a string.) Type: string (or Expression with resultType string). + */ + clientId?: any; + /** + * Language of the SAP system where the table is located. The default value is EN. Type: string (or Expression with resultType string). + */ + language?: any; + /** + * SystemID of the SAP system where the table is located. Type: string (or Expression with resultType string). + */ + systemId?: any; + /** + * Username to access the SAP server where the table is located. Type: string (or Expression with resultType string). + */ + userName?: any; + /** + * Password to access the SAP server where the table is located. + */ + password?: SecretBaseUnion; + /** + * The hostname of the SAP Message Server. Type: string (or Expression with resultType string). + */ + messageServer?: any; + /** + * The service name or port number of the Message Server. Type: string (or Expression with resultType string). + */ + messageServerService?: any; + /** + * SNC activation indicator to access the SAP server where the table is located. Must be either 0 (off) or 1 (on). Type: string (or Expression with resultType string). + */ + sncMode?: any; + /** + * Initiator's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string).
+ */ + sncMyName?: any; + /** + * Communication partner's SNC name to access the SAP server where the table is located. Type: string (or Expression with resultType string). + */ + sncPartnerName?: any; + /** + * External security product's library to access the SAP server where the table is located. Type: string (or Expression with resultType string). + */ + sncLibraryPath?: any; + /** + * SNC Quality of Protection. Allowed values include: 1, 2, 3, 8, 9. Type: string (or Expression with resultType string). + */ + sncQop?: any; + /** + * The Logon Group for the SAP System. Type: string (or Expression with resultType string). + */ + logonGroup?: any; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * Azure Data Explorer (Kusto) linked service. + */ +export type AzureDataExplorerLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataExplorer"; + /** + * The endpoint of Azure Data Explorer (the engine's endpoint). URL will be in the format https://..kusto.windows.net. Type: string (or Expression with resultType string). + */ + endpoint: any; + /** + * The ID of the service principal used to authenticate against Azure Data Explorer. Type: string (or Expression with resultType string). + */ + servicePrincipalId: any; + /** + * The key of the service principal used to authenticate against Kusto. + */ + servicePrincipalKey: SecretBaseUnion; + /** + * Database name for connection. Type: string (or Expression with resultType string). + */ + database: any; + /** + * The name or ID of the tenant to which the service principal belongs. Type: string (or Expression with resultType string). + */ + tenant: any; +}; + +/** + * Azure Function linked service. + */ +export type AzureFunctionLinkedService = LinkedService & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureFunction"; + /** + * The endpoint of the Azure Function App. URL will be in the format https://.azurewebsites.net. + */ + functionAppUrl: any; + /** + * Function or Host key for Azure Function App. + */ + functionKey?: SecretBaseUnion; + /** + * The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). + */ + encryptedCredential?: any; +}; + +/** + * The resource model definition for an Azure Resource Manager resource with an etag. + */ +export type AzureEntityResource = Resource & { + /** + * Resource Etag. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly etag?: string; +}; + +/** + * A private endpoint connection. + */ +export type PrivateEndpointConnection = Resource & { + /** + * The private endpoint which the connection belongs to. + */ + privateEndpoint?: PrivateEndpoint; + /** + * Connection state of the private endpoint connection. + */ + privateLinkServiceConnectionState?: PrivateLinkServiceConnectionState; + /** + * Provisioning state of the private endpoint connection. + * NOTE: This property will not be serialized. It can only be populated by the server.
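+ * @example + * // Hedged illustration -- "Succeeded" is a typical ARM provisioning state, assumed here; the property is read-only: + * // provisioningState: "Succeeded"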
+ */ + readonly provisioningState?: string; +}; + +/** + * The resource model definition for an Azure Resource Manager tracked top-level resource which has 'tags' and a 'location'. + */ +export type TrackedResource = Resource & { + /** + * Resource tags. + */ + tags?: { [propertyName: string]: string }; + /** + * The geo-location where the resource lives. + */ + location: string; +}; + +/** + * The resource model definition for an Azure Resource Manager proxy resource. It will not have tags or a location. + */ +export type ProxyResource = Resource & {}; + +/** + * Avro dataset. + */ +export type AvroDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Avro"; + /** + * The location of the avro storage. + */ + location?: DatasetLocationUnion; + avroCompressionCodec?: AvroCompressionCodec; + avroCompressionLevel?: number; +}; + +/** + * Parquet dataset. + */ +export type ParquetDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Parquet"; + /** + * The location of the parquet storage. + */ + location?: DatasetLocationUnion; + compressionCodec?: ParquetCompressionCodec; +}; + +/** + * Delimited text dataset. + */ +export type DelimitedTextDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DelimitedText"; + /** + * The location of the delimited text storage. + */ + location?: DatasetLocationUnion; + /** + * The column delimiter. Type: string (or Expression with resultType string). + */ + columnDelimiter?: any; + /** + * The row delimiter. Type: string (or Expression with resultType string). + */ + rowDelimiter?: any; + /** + * The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). + */ + encodingName?: any; + compressionCodec?: DelimitedTextCompressionCodec; + /** + * The data compression level used for DelimitedText. + */ + compressionLevel?: DatasetCompressionLevel; + /** + * The quote character. Type: string (or Expression with resultType string). + */ + quoteChar?: any; + /** + * The escape character. Type: string (or Expression with resultType string). + */ + escapeChar?: any; + /** + * When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean). + */ + firstRowAsHeader?: any; + /** + * The null value string. Type: string (or Expression with resultType string). + */ + nullValue?: any; +}; + +/** + * Json dataset. + */ +export type JsonDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Json"; + /** + * The location of the json data storage. + */ + location?: DatasetLocationUnion; + /** + * The code page name of the preferred encoding. If not specified, the default value is UTF-8, unless BOM denotes another Unicode encoding. Refer to the name column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string).
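+ * @example + * // Hedged illustration: pin the documented UTF-8 default explicitly + * // encodingName: "UTF-8"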
+ */ + encodingName?: any; + /** + * The data compression method used for the json dataset. + */ + compression?: DatasetCompressionUnion; +}; + +/** + * ORC dataset. + */ +export type OrcDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Orc"; + /** + * The location of the ORC data storage. + */ + location?: DatasetLocationUnion; + orcCompressionCodec?: OrcCompressionCodec; +}; + +/** + * Binary dataset. + */ +export type BinaryDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Binary"; + /** + * The location of the Binary storage. + */ + location?: DatasetLocationUnion; + /** + * The data compression method used for the binary dataset. + */ + compression?: DatasetCompressionUnion; +}; + +/** + * The Azure Table storage dataset. + */ +export type AzureTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureTable"; + /** + * The table name of the Azure Table storage. Type: string (or Expression with resultType string). + */ + tableName: any; +}; + +/** + * The Azure SQL Server database dataset. + */ +export type AzureSqlTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSqlTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the Azure SQL database. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the Azure SQL database. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Azure SQL Managed Instance dataset. + */ +export type AzureSqlMITableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSqlMITable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the Azure SQL Managed Instance. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the Azure SQL Managed Instance dataset. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Azure SQL Data Warehouse dataset. + */ +export type AzureSqlDWTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSqlDWTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the Azure SQL Data Warehouse. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Cassandra database dataset. + */ +export type CassandraTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CassandraTable"; + /** + * The table name of the Cassandra database. Type: string (or Expression with resultType string). + */ + tableName?: any; + /** + * The keyspace of the Cassandra database. 
Type: string (or Expression with resultType string). + */ + keyspace?: any; +}; + +/** + * The custom dataset. + */ +export type CustomDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CustomDataset"; + /** + * Custom dataset properties. + */ + typeProperties?: any; +}; + +/** + * Microsoft Azure CosmosDB (SQL API) Collection dataset. + */ +export type CosmosDbSqlApiCollectionDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CosmosDbSqlApiCollection"; + /** + * CosmosDB (SQL API) collection name. Type: string (or Expression with resultType string). + */ + collectionName: any; +}; + +/** + * Microsoft Azure Document Database Collection dataset. + */ +export type DocumentDbCollectionDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DocumentDbCollection"; + /** + * Document Database collection name. Type: string (or Expression with resultType string). + */ + collectionName: any; +}; + +/** + * The Dynamics entity dataset. + */ +export type DynamicsEntityDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsEntity"; + /** + * The logical name of the entity. Type: string (or Expression with resultType string). + */ + entityName?: any; +}; + +/** + * The Dynamics CRM entity dataset. + */ +export type DynamicsCrmEntityDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsCrmEntity"; + /** + * The logical name of the entity. Type: string (or Expression with resultType string). + */ + entityName?: any; +}; + +/** + * The Common Data Service for Apps entity dataset. + */ +export type CommonDataServiceForAppsEntityDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CommonDataServiceForAppsEntity"; + /** + * The logical name of the entity. Type: string (or Expression with resultType string). + */ + entityName?: any; +}; + +/** + * The Office365 account. + */ +export type Office365Dataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Office365Table"; + /** + * Name of the dataset to extract from Office 365. Type: string (or Expression with resultType string). + */ + tableName: any; + /** + * A predicate expression that can be used to filter the specific rows to extract from Office 365. Type: string (or Expression with resultType string). + */ + predicate?: any; +}; + +/** + * The MongoDB database dataset. + */ +export type MongoDbCollectionDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MongoDbCollection"; + /** + * The table name of the MongoDB database. Type: string (or Expression with resultType string). + */ + collectionName: any; +}; + +/** + * The MongoDB database dataset. + */ +export type MongoDbV2CollectionDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MongoDbV2Collection"; + /** + * The collection name of the MongoDB database. Type: string (or Expression with resultType string). + */ + collection: any; +}; + +/** + * The CosmosDB (MongoDB API) database dataset. 
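+ * + * A hedged usage sketch (the reference name and collection below are illustrative assumptions; assumes the Dataset base type's required linkedServiceName reference): + * @example + * const dataset: CosmosDbMongoDbApiCollectionDataset = { + *   type: "CosmosDbMongoDbApiCollection", + *   linkedServiceName: { type: "LinkedServiceReference", referenceName: "MyCosmosDbMongoDbApiLinkedService" }, + *   collection: "orders" + * };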
+ */ +export type CosmosDbMongoDbApiCollectionDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CosmosDbMongoDbApiCollection"; + /** + * The collection name of the CosmosDB (MongoDB API) database. Type: string (or Expression with resultType string). + */ + collection: any; +}; + +/** + * The Open Data Protocol (OData) resource dataset. + */ +export type ODataResourceDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ODataResource"; + /** + * The OData resource path. Type: string (or Expression with resultType string). + */ + path?: any; +}; + +/** + * The on-premises Oracle database dataset. + */ +export type OracleTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OracleTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the on-premises Oracle database. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the on-premises Oracle database. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Teradata database dataset. + */ +export type TeradataTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "TeradataTable"; + /** + * The database name of Teradata. Type: string (or Expression with resultType string). + */ + database?: any; + /** + * The table name of Teradata. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Azure MySQL database dataset. + */ +export type AzureMySqlTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMySqlTable"; + /** + * The Azure MySQL database table name. Type: string (or Expression with resultType string). + */ + tableName?: any; + /** + * The name of the Azure MySQL database table. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The Amazon Redshift table dataset. + */ +export type AmazonRedshiftTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AmazonRedshiftTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The Amazon Redshift table name. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The Amazon Redshift schema name. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * The Db2 table dataset. + */ +export type Db2TableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Db2Table"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The Db2 schema name. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The Db2 table name. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * The relational table dataset.
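+ * + * A hedged usage sketch (the reference name and table below are illustrative assumptions): + * @example + * const dataset: RelationalTableDataset = { + *   type: "RelationalTable", + *   linkedServiceName: { type: "LinkedServiceReference", referenceName: "MyOdbcLinkedService" }, + *   tableName: "dbo.MyTable" + * };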
+ */ +export type RelationalTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "RelationalTable"; + /** + * The relational table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The Informix table dataset. + */ +export type InformixTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "InformixTable"; + /** + * The Informix table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The ODBC table dataset. + */ +export type OdbcTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OdbcTable"; + /** + * The ODBC table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The MySQL table dataset. + */ +export type MySqlTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MySqlTable"; + /** + * The MySQL table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The PostgreSQL table dataset. + */ +export type PostgreSqlTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "PostgreSqlTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The PostgreSQL table name. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The PostgreSQL schema name. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * The Microsoft Access table dataset. + */ +export type MicrosoftAccessTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MicrosoftAccessTable"; + /** + * The Microsoft Access table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The Salesforce object dataset. + */ +export type SalesforceObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceObject"; + /** + * The Salesforce object API name. Type: string (or Expression with resultType string). + */ + objectApiName?: any; +}; + +/** + * The Salesforce Service Cloud object dataset. + */ +export type SalesforceServiceCloudObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceServiceCloudObject"; + /** + * The Salesforce Service Cloud object API name. Type: string (or Expression with resultType string). + */ + objectApiName?: any; +}; + +/** + * The Sybase table dataset. + */ +export type SybaseTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SybaseTable"; + /** + * The Sybase table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The SAP BW cube dataset. + */ +export type SapBwCubeDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapBwCube"; +}; + +/** + * The path of the SAP Cloud for Customer OData entity. 
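+ * + * A hedged usage sketch (the reference name and entity path below are illustrative assumptions): + * @example + * const dataset: SapCloudForCustomerResourceDataset = { + *   type: "SapCloudForCustomerResource", + *   linkedServiceName: { type: "LinkedServiceReference", referenceName: "MySapCloudForCustomerLinkedService" }, + *   path: "LeadCollection" + * };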
+ */ +export type SapCloudForCustomerResourceDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapCloudForCustomerResource"; + /** + * The path of the SAP Cloud for Customer OData entity. Type: string (or Expression with resultType string). + */ + path: any; +}; + +/** + * The path of the SAP ECC OData entity. + */ +export type SapEccResourceDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapEccResource"; + /** + * The path of the SAP ECC OData entity. Type: string (or Expression with resultType string). + */ + path: any; +}; + +/** + * SAP HANA Table properties. + */ +export type SapHanaTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapHanaTable"; + /** + * The schema name of SAP HANA. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of SAP HANA. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * SAP Business Warehouse Open Hub Destination Table properties. + */ +export type SapOpenHubTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapOpenHubTable"; + /** + * The name of the Open Hub Destination with destination type as Database Table. Type: string (or Expression with resultType string). + */ + openHubDestinationName: any; + /** + * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + excludeLastRequest?: any; + /** + * The ID of the request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. Type: integer (or Expression with resultType integer). + */ + baseRequestId?: any; +}; + +/** + * The on-premises SQL Server dataset. + */ +export type SqlServerTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlServerTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The schema name of the SQL Server dataset. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; + /** + * The table name of the SQL Server dataset. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * A REST service dataset. + */ +export type RestResourceDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "RestResource"; + /** + * The relative URL to the resource that the RESTful API provides. Type: string (or Expression with resultType string). + */ + relativeUrl?: any; + /** + * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). + */ + requestMethod?: any; + /** + * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). + */ + requestBody?: any; + /** + * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). + */ + additionalHeaders?: any; + /** + * The pagination rules to compose next page requests.
Type: string (or Expression with resultType string). + */ + paginationRules?: any; +}; + +/** + * SAP Table Resource properties. + */ +export type SapTableResourceDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapTableResource"; + /** + * The name of the SAP Table. Type: string (or Expression with resultType string). + */ + tableName: any; +}; + +/** + * The dataset points to an HTML table in the web page. + */ +export type WebTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "WebTable"; + /** + * The zero-based index of the table in the web page. Type: integer (or Expression with resultType integer), minimum: 0. + */ + index: any; + /** + * The relative URL to the web page from the linked service URL. Type: string (or Expression with resultType string). + */ + path?: any; +}; + +/** + * The Azure Search Index. + */ +export type AzureSearchIndexDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSearchIndex"; + /** + * The name of the Azure Search Index. Type: string (or Expression with resultType string). + */ + indexName: any; +}; + +/** + * Amazon Marketplace Web Service dataset. + */ +export type AmazonMWSObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AmazonMWSObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Azure PostgreSQL dataset. + */ +export type AzurePostgreSqlTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzurePostgreSqlTable"; + /** + * The table name of the Azure PostgreSQL database which includes both schema and table. Type: string (or Expression with resultType string). + */ + tableName?: any; + /** + * The table name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Azure PostgreSQL database. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Concur Service dataset. + */ +export type ConcurObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ConcurObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Couchbase server dataset. + */ +export type CouchbaseTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CouchbaseTable"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Drill server dataset. + */ +export type DrillTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DrillTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Drill. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Drill. Type: string (or Expression with resultType string).
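+ * @example + * // Hedged illustration (assumed names): pair schema and table rather than the retired tableName + * // schemaTypePropertiesSchema: "dfs", table: "my_drill_table"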
+ */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Eloqua server dataset. + */ +export type EloquaObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "EloquaObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Google BigQuery service dataset. + */ +export type GoogleBigQueryObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GoogleBigQueryObject"; + /** + * This property will be retired. Please consider using database + table properties instead. + */ + tableName?: any; + /** + * The table name of the Google BigQuery. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The database name of the Google BigQuery. Type: string (or Expression with resultType string). + */ + dataset?: any; +}; + +/** + * Greenplum Database dataset. + */ +export type GreenplumTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GreenplumTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of Greenplum. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of Greenplum. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * HBase server dataset. + */ +export type HBaseObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HBaseObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Hive Server dataset. + */ +export type HiveObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HiveObject"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Hive. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Hive. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Hubspot Service dataset. + */ +export type HubspotObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HubspotObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Impala server dataset. + */ +export type ImpalaObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ImpalaObject"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Impala. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Impala. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Jira Service dataset. 
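+ * + * A hedged usage sketch (the reference name and table below are illustrative assumptions): + * @example + * const dataset: JiraObjectDataset = { + *   type: "JiraObject", + *   linkedServiceName: { type: "LinkedServiceReference", referenceName: "MyJiraLinkedService" }, + *   tableName: "Issues" + * };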
+ */ +export type JiraObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "JiraObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Magento server dataset. + */ +export type MagentoObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MagentoObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * MariaDB server dataset. + */ +export type MariaDBTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MariaDBTable"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Azure Database for MariaDB dataset. + */ +export type AzureMariaDBTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMariaDBTable"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Marketo server dataset. + */ +export type MarketoObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MarketoObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Paypal Service dataset. + */ +export type PaypalObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "PaypalObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Phoenix server dataset. + */ +export type PhoenixObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "PhoenixObject"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Phoenix. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Phoenix. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Presto server dataset. + */ +export type PrestoObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "PrestoObject"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Presto. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Presto. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * QuickBooks server dataset. + */ +export type QuickBooksObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "QuickBooksObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * ServiceNow server dataset. 
+ */ +export type ServiceNowObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ServiceNowObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Shopify Service dataset. + */ +export type ShopifyObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ShopifyObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Spark Server dataset. + */ +export type SparkObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SparkObject"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Spark. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Spark. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Square Service dataset. + */ +export type SquareObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SquareObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Xero Service dataset. + */ +export type XeroObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "XeroObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Zoho server dataset. + */ +export type ZohoObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ZohoObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Netezza dataset. + */ +export type NetezzaTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "NetezzaTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Netezza. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Netezza. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Vertica dataset. + */ +export type VerticaTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "VerticaTable"; + /** + * This property will be retired. Please consider using schema + table properties instead. + */ + tableName?: any; + /** + * The table name of the Vertica. Type: string (or Expression with resultType string). + */ + table?: any; + /** + * The schema name of the Vertica. Type: string (or Expression with resultType string). + */ + schemaTypePropertiesSchema?: any; +}; + +/** + * Salesforce Marketing Cloud dataset. + */ +export type SalesforceMarketingCloudObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceMarketingCloudObject"; + /** + * The table name. 
Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Responsys dataset. + */ +export type ResponsysObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ResponsysObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The path of the Dynamics AX OData entity. + */ +export type DynamicsAXResourceDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsAXResource"; + /** + * The path of the Dynamics AX OData entity. Type: string (or Expression with resultType string). + */ + path: any; +}; + +/** + * Oracle Service Cloud dataset. + */ +export type OracleServiceCloudObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OracleServiceCloudObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * The Azure Data Explorer (Kusto) dataset. + */ +export type AzureDataExplorerTableDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataExplorerTable"; + /** + * The table name of the Azure Data Explorer database. Type: string (or Expression with resultType string). + */ + table?: any; +}; + +/** + * Google AdWords service dataset. + */ +export type GoogleAdWordsObjectDataset = Dataset & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GoogleAdWordsObject"; + /** + * The table name. Type: string (or Expression with resultType string). + */ + tableName?: any; +}; + +/** + * Base class for all control activities like IfCondition, ForEach, Until. + */ +export type ControlActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Container"; +}; + +/** + * Base class for all execution activities. + */ +export type ExecutionActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Execution"; + /** + * Linked service reference. + */ + linkedServiceName?: LinkedServiceReference; + /** + * Activity policy. + */ + policy?: ActivityPolicy; +}; + +/** + * Execute pipeline activity. + */ +export type ExecutePipelineActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ExecutePipeline"; + /** + * Pipeline reference. + */ + pipeline: PipelineReference; + /** + * Pipeline parameters. + */ + parameters?: { [propertyName: string]: any }; + /** + * Defines whether activity execution will wait for the dependent pipeline execution to finish. Default is false. + */ + waitOnCompletion?: boolean; +}; + +/** + * This activity evaluates a boolean expression and executes either the activities under the ifTrueActivities property or the ifFalseActivities property depending on the result of the expression. + */ +export type IfConditionActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "IfCondition"; + /** + * An expression that would evaluate to Boolean. This is used to determine the block of activities (ifTrueActivities or ifFalseActivities) that will be executed.
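// Illustrative sketch: an ExecutePipelineActivity that invokes a child pipeline
// and blocks until it completes. The `name` property comes from the Activity
// base type; the pipeline name and parameter are placeholders.
const runChild: ExecutePipelineActivity = {
  name: "RunIngestionPipeline",
  type: "ExecutePipeline",
  pipeline: { type: "PipelineReference", referenceName: "IngestionPipeline" },
  parameters: { runDate: "2020-12-01" },
  waitOnCompletion: true
};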
+ */ + expression: Expression; + /** + * List of activities to execute if expression is evaluated to true. This is an optional property and if not provided, the activity will exit without any action. + */ + ifTrueActivities?: ActivityUnion[]; + /** + * List of activities to execute if expression is evaluated to false. This is an optional property and if not provided, the activity will exit without any action. + */ + ifFalseActivities?: ActivityUnion[]; +}; + +/** + * This activity evaluates an expression and executes activities under the cases property that correspond to the expression evaluation expected in the equals property. + */ +export type SwitchActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Switch"; + /** + * An expression that would evaluate to a string or integer. This is used to determine the block of activities in cases that will be executed. + */ + on: Expression; + /** + * List of cases that correspond to expected values of the 'on' property. This is an optional property and if not provided, the activity will execute activities provided in defaultActivities. + */ + cases?: SwitchCase[]; + /** + * List of activities to execute if no case condition is satisfied. This is an optional property and if not provided, the activity will exit without any action. + */ + defaultActivities?: ActivityUnion[]; +}; + +/** + * This activity is used for iterating over a collection and executing the given activities. + */ +export type ForEachActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ForEach"; + /** + * Should the loop be executed in sequence or in parallel (max 50). + */ + isSequential?: boolean; + /** + * Batch count to be used for controlling the number of parallel execution (when isSequential is set to false). + */ + batchCount?: number; + /** + * Collection to iterate. + */ + items: Expression; + /** + * List of activities to execute. + */ + activities: ActivityUnion[]; +}; + +/** + * This activity suspends pipeline execution for the specified interval. + */ +export type WaitActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Wait"; + /** + * Duration in seconds. + */ + waitTimeInSeconds: number; +}; + +/** + * This activity executes inner activities until the specified boolean expression evaluates to true or the timeout is reached, whichever is earlier. + */ +export type UntilActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Until"; + /** + * An expression that would evaluate to Boolean. The loop will continue until this expression evaluates to true. + */ + expression: Expression; + /** + * Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + timeout?: any; + /** + * List of activities to execute. + */ + activities: ActivityUnion[]; +}; + +/** + * This activity verifies that an external resource exists.
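// Illustrative sketch: a ForEachActivity fanning out over an expression-valued
// collection; batchCount caps parallelism only when isSequential is false. The
// Expression shape ({ type: "Expression", value }) is assumed from this models
// file, and the parameter name is a placeholder.
const copyEachFile: ForEachActivity = {
  name: "CopyEachFile",
  type: "ForEach",
  isSequential: false,
  batchCount: 10,
  items: { type: "Expression", value: "@pipeline().parameters.fileNames" },
  activities: [] // one or more inner activities, e.g. a copy per file
};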
+ */ +export type ValidationActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Validation"; + /** + * Specifies the timeout for the activity to run. If there is no value specified, it takes the value of TimeSpan.FromDays(7) which is 1 week as default. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + timeout?: any; + /** + * A delay in seconds between validation attempts. If no value is specified, 10 seconds will be used as the default. Type: integer (or Expression with resultType integer). + */ + sleep?: any; + /** + * Can be used if dataset points to a file. The file must be greater than or equal in size to the value specified. Type: integer (or Expression with resultType integer). + */ + minimumSize?: any; + /** + * Can be used if dataset points to a folder. If set to true, the folder must have at least one file. If set to false, the folder must be empty. Type: boolean (or Expression with resultType boolean). + */ + childItems?: any; + /** + * Validation activity dataset reference. + */ + dataset: DatasetReference; +}; + +/** + * Filter and return results from input array based on the conditions. + */ +export type FilterActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Filter"; + /** + * Input array on which filter should be applied. + */ + items: Expression; + /** + * Condition to be used for filtering the input. + */ + condition: Expression; +}; + +/** + * Set value for a Variable. + */ +export type SetVariableActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SetVariable"; + /** + * Name of the variable whose value needs to be set. + */ + variableName?: string; + /** + * Value to be set. Could be a static value or Expression. + */ + value?: any; +}; + +/** + * Append value for a Variable of type Array. + */ +export type AppendVariableActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AppendVariable"; + /** + * Name of the variable whose value needs to be appended to. + */ + variableName?: string; + /** + * Value to be appended. Could be a static value or Expression. + */ + value?: any; +}; + +/** + * WebHook activity. + */ +export type WebHookActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "WebHook"; + /** + * Rest API method for target endpoint. + */ + method: WebHookActivityMethod; + /** + * WebHook activity target endpoint and path. Type: string (or Expression with resultType string). + */ + url: any; + /** + * The timeout within which the webhook should be called back. If there is no value specified, it defaults to 10 minutes. Type: string. Pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + timeout?: string; + /** + * Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). + */ + headers?: any; + /** + * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method. Type: string (or Expression with resultType string).
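// Illustrative sketch: seeding an Array-typed pipeline variable with
// SetVariableActivity and growing it with AppendVariableActivity; the variable
// and activity names are placeholders.
const resetErrors: SetVariableActivity = {
  name: "ResetErrors",
  type: "SetVariable",
  variableName: "errors",
  value: []
};
const recordError: AppendVariableActivity = {
  name: "RecordError",
  type: "AppendVariable",
  variableName: "errors",
  value: { type: "Expression", value: "@activity('CopyStep').error.message" }
};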
+ */ + body?: any; + /** + * Authentication method used for calling the endpoint. + */ + authentication?: WebActivityAuthentication; + /** + * When set to true, statusCode, output and error in callback request body will be consumed by activity. The activity can be marked as failed by setting statusCode >= 400 in callback request. Default is false. Type: boolean (or Expression with resultType boolean). + */ + reportStatusOnCallBack?: any; +}; + +/** + * Execute SQL pool stored procedure activity. + */ +export type SqlPoolStoredProcedureActivity = Activity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlPoolStoredProcedure"; + /** + * SQL pool stored procedure reference. + */ + sqlPool: SqlPoolReference; + /** + * Stored procedure name. Type: string (or Expression with resultType string). + */ + storedProcedureName: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; +}; + +/** + * Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. + */ +export type RerunTumblingWindowTrigger = Trigger & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "RerunTumblingWindowTrigger"; + /** + * The parent trigger reference. + */ + parentTrigger?: any; + /** + * The start time for the time period for which restatement is initiated. Only UTC time is currently supported. + */ + requestedStartTime: Date; + /** + * The end time for the time period for which restatement is initiated. Only UTC time is currently supported. + */ + requestedEndTime: Date; + /** + * The max number of parallel time windows (ready for execution) for which a rerun is triggered. + */ + maxConcurrency: number; +}; + +/** + * Base class for all triggers that support one to many model for trigger to pipeline. + */ +export type MultiplePipelineTrigger = Trigger & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MultiplePipelineTrigger"; + /** + * Pipelines that need to be started. + */ + pipelines?: TriggerPipelineReference[]; +}; + +/** + * Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). + */ +export type TumblingWindowTrigger = Trigger & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "TumblingWindowTrigger"; + /** + * Pipeline for which runs are created when an event is fired for trigger window that is ready. + */ + pipeline: TriggerPipelineReference; + /** + * The frequency of the time windows. + */ + frequency: TumblingWindowFrequency; + /** + * The interval of the time windows. The minimum interval allowed is 15 Minutes. + */ + interval: number; + /** + * The start time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. + */ + startTime: Date; + /** + * The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported. + */ + endTime?: Date; + /** + * Specifies how long the trigger waits past due time before triggering new run. It doesn't alter window start and end time. The default is 0. 
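// Illustrative sketch: a WebHookActivity posting to a callback endpoint and
// consuming the callback status; the URL, headers, and names are placeholders.
const notifyDownstream: WebHookActivity = {
  name: "NotifyDownstream",
  type: "WebHook",
  method: "POST",
  url: "https://example.org/api/pipeline-callback",
  timeout: "00:10:00",
  headers: { "Content-Type": "application/json" },
  body: { type: "Expression", value: "@string(pipeline().parameters)" },
  reportStatusOnCallBack: true
};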
Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + delay?: any; + /** + * The max number of parallel time windows (ready for execution) for which a new run is triggered. + */ + maxConcurrency: number; + /** + * Retry policy that will be applied for failed pipeline runs. + */ + retryPolicy?: RetryPolicy; + /** + * Triggers that this trigger depends on. Only tumbling window triggers are supported. + */ + dependsOn?: DependencyReferenceUnion[]; +}; + +/** + * Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. + */ +export type ChainingTrigger = Trigger & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ChainingTrigger"; + /** + * Pipeline for which runs are created when all upstream pipelines complete successfully. + */ + pipeline: TriggerPipelineReference; + /** + * Upstream Pipelines. + */ + dependsOn: PipelineReference[]; + /** + * Run Dimension property that needs to be emitted by upstream pipelines. + */ + runDimension: string; +}; + +/** + * Mapping data flow. + */ +export type MappingDataFlow = DataFlow & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MappingDataFlow"; + /** + * List of sources in data flow. + */ + sources?: DataFlowSource[]; + /** + * List of sinks in data flow. + */ + sinks?: DataFlowSink[]; + /** + * List of transformations in data flow. + */ + transformations?: Transformation[]; + /** + * DataFlow script. + */ + script?: string; +}; + +/** + * Data flow debug resource. + */ +export type DataFlowDebugResource = SubResourceDebugResource & { + /** + * Data flow properties. + */ + properties: DataFlowUnion; +}; + +/** + * Dataset debug resource. + */ +export type DatasetDebugResource = SubResourceDebugResource & { + /** + * Dataset properties. + */ + properties: DatasetUnion; +}; + +/** + * Linked service debug resource. + */ +export type LinkedServiceDebugResource = SubResourceDebugResource & { + /** + * Properties of linked service. + */ + properties: LinkedServiceUnion; +}; + +/** + * Managed integration runtime, including managed elastic and managed dedicated integration runtimes. + */ +export type ManagedIntegrationRuntime = IntegrationRuntime & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Managed"; + /** + * Integration runtime state, only valid for managed dedicated integration runtime. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly state?: IntegrationRuntimeState; + /** + * The compute resource for managed integration runtime. + */ + computeProperties?: IntegrationRuntimeComputeProperties; + /** + * SSIS properties for managed integration runtime. + */ + ssisProperties?: IntegrationRuntimeSsisProperties; +}; + +/** + * Self-hosted integration runtime. 
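// Illustrative sketch: a TumblingWindowTrigger cutting contiguous 15-minute
// windows (the documented minimum interval) with bounded concurrency. The
// TriggerPipelineReference shape is assumed from this models file; the names
// and times are placeholders.
const windowedLoad: TumblingWindowTrigger = {
  type: "TumblingWindowTrigger",
  pipeline: {
    pipelineReference: { type: "PipelineReference", referenceName: "HourlyLoad" }
  },
  frequency: "Minute",
  interval: 15,
  startTime: new Date("2020-12-01T00:00:00Z"),
  maxConcurrency: 4
};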
+ */ +export type SelfHostedIntegrationRuntime = IntegrationRuntime & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SelfHosted"; + /** + * Linked integration runtime type from data factory + */ + linkedInfo?: LinkedIntegrationRuntimeTypeUnion; +}; + +/** + * Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. + */ +export type SecureString = SecretBase & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SecureString"; + /** + * Value of secure string. + */ + value: string; +}; + +/** + * Azure Key Vault secret reference. + */ +export type AzureKeyVaultSecretReference = SecretBase & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureKeyVaultSecret"; + /** + * The Azure Key Vault linked service reference. + */ + store: LinkedServiceReference; + /** + * The name of the secret in Azure Key Vault. Type: string (or Expression with resultType string). + */ + secretName: any; + /** + * The version of the secret in Azure Key Vault. The default value is the latest version of the secret. Type: string (or Expression with resultType string). + */ + secretVersion?: any; +}; + +/** + * Transformation for data flow source. + */ +export type DataFlowSource = Transformation & { + /** + * Dataset reference. + */ + dataset?: DatasetReference; +}; + +/** + * Transformation for data flow sink. + */ +export type DataFlowSink = Transformation & { + /** + * Dataset reference. + */ + dataset?: DatasetReference; +}; + +/** + * The location of azure blob dataset. + */ +export type AzureBlobStorageLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobStorageLocation"; + /** + * Specify the container of azure blob. Type: string (or Expression with resultType string). + */ + container?: any; +}; + +/** + * The location of azure blobFS dataset. + */ +export type AzureBlobFSLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobFSLocation"; + /** + * Specify the fileSystem of azure blobFS. Type: string (or Expression with resultType string). + */ + fileSystem?: any; +}; + +/** + * The location of azure data lake store dataset. + */ +export type AzureDataLakeStoreLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataLakeStoreLocation"; +}; + +/** + * The location of amazon S3 dataset. + */ +export type AmazonS3Location = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AmazonS3Location"; + /** + * Specify the bucketName of amazon S3. Type: string (or Expression with resultType string) + */ + bucketName?: any; + /** + * Specify the version of amazon S3. Type: string (or Expression with resultType string). + */ + version?: any; +}; + +/** + * The location of file server dataset. + */ +export type FileServerLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "FileServerLocation"; +}; + +/** + * The location of Azure File Storage dataset.
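// Illustrative sketch: referencing a Key Vault secret instead of an inline
// SecureString, so the raw value never appears in the authored payload; the
// linked service and secret names are placeholders.
const dbPassword: AzureKeyVaultSecretReference = {
  type: "AzureKeyVaultSecret",
  store: { type: "LinkedServiceReference", referenceName: "myKeyVault" },
  secretName: "sql-password"
  // secretVersion omitted: the latest version of the secret is used.
};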
+ */ +export type AzureFileStorageLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureFileStorageLocation"; +}; + +/** + * The location of Google Cloud Storage dataset. + */ +export type GoogleCloudStorageLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GoogleCloudStorageLocation"; + /** + * Specify the bucketName of Google Cloud Storage. Type: string (or Expression with resultType string) + */ + bucketName?: any; + /** + * Specify the version of Google Cloud Storage. Type: string (or Expression with resultType string). + */ + version?: any; +}; + +/** + * The location of ftp server dataset. + */ +export type FtpServerLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "FtpServerLocation"; +}; + +/** + * The location of SFTP dataset. + */ +export type SftpLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SftpLocation"; +}; + +/** + * The location of http server. + */ +export type HttpServerLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HttpServerLocation"; + /** + * Specify the relativeUrl of http server. Type: string (or Expression with resultType string) + */ + relativeUrl?: any; +}; + +/** + * The location of HDFS. + */ +export type HdfsLocation = DatasetLocation & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HdfsLocation"; +}; + +/** + * The data stored in text format. + */ +export type TextFormat = DatasetStorageFormat & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "TextFormat"; + /** + * The column delimiter. Type: string (or Expression with resultType string). + */ + columnDelimiter?: any; + /** + * The row delimiter. Type: string (or Expression with resultType string). + */ + rowDelimiter?: any; + /** + * The escape character. Type: string (or Expression with resultType string). + */ + escapeChar?: any; + /** + * The quote character. Type: string (or Expression with resultType string). + */ + quoteChar?: any; + /** + * The null value string. Type: string (or Expression with resultType string). + */ + nullValue?: any; + /** + * The code page name of the preferred encoding. If not specified, the default value is "utf-8", unless the BOM denotes another Unicode encoding. Refer to the "Name" column of the table in the following link to set supported values: https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with resultType string). + */ + encodingName?: any; + /** + * Treat empty column values in the text file as null. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + treatEmptyAsNull?: any; + /** + * The number of lines/rows to be skipped when parsing text files. The default value is 0. Type: integer (or Expression with resultType integer). + */ + skipLineCount?: any; + /** + * When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. The default value is false. Type: boolean (or Expression with resultType boolean).
+ */ + firstRowAsHeader?: any; +}; + +/** + * The data stored in JSON format. + */ +export type JsonFormat = DatasetStorageFormat & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "JsonFormat"; + /** + * File pattern of JSON. To be more specific, the way of separating a collection of JSON objects. The default value is 'setOfObjects'. It is case-sensitive. + */ + filePattern?: JsonFormatFilePattern; + /** + * The character used to separate nesting levels. Default value is '.' (dot). Type: string (or Expression with resultType string). + */ + nestingSeparator?: any; + /** + * The code page name of the preferred encoding. If not provided, the default value is 'utf-8', unless the byte order mark (BOM) denotes another Unicode encoding. The full list of supported values can be found in the 'Name' column of the table of encodings in the following reference: https://go.microsoft.com/fwlink/?linkid=861078. Type: string (or Expression with resultType string). + */ + encodingName?: any; + /** + * The JSONPath of the JSON array element to be flattened. Example: "$.ArrayPath". Type: string (or Expression with resultType string). + */ + jsonNodeReference?: any; + /** + * The JSONPath definition for each column mapping with a customized column name to extract data from JSON file. For fields under root object, start with "$"; for fields inside the array chosen by jsonNodeReference property, start from the array element. Example: {"Column1": "$.Column1Path", "Column2": "Column2PathInArray"}. Type: object (or Expression with resultType object). + */ + jsonPathDefinition?: any; +}; + +/** + * The data stored in Avro format. + */ +export type AvroFormat = DatasetStorageFormat & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AvroFormat"; +}; + +/** + * The data stored in Optimized Row Columnar (ORC) format. + */ +export type OrcFormat = DatasetStorageFormat & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OrcFormat"; +}; + +/** + * The data stored in Parquet format. + */ +export type ParquetFormat = DatasetStorageFormat & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ParquetFormat"; +}; + +/** + * The BZip2 compression method used on a dataset. + */ +export type DatasetBZip2Compression = DatasetCompression & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "BZip2"; +}; + +/** + * The GZip compression method used on a dataset. + */ +export type DatasetGZipCompression = DatasetCompression & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GZip"; + /** + * The GZip compression level. + */ + level?: DatasetCompressionLevel; +}; + +/** + * The Deflate compression method used on a dataset. + */ +export type DatasetDeflateCompression = DatasetCompression & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Deflate"; + /** + * The Deflate compression level. + */ + level?: DatasetCompressionLevel; +}; + +/** + * The ZipDeflate compression method used on a dataset. + */ +export type DatasetZipDeflateCompression = DatasetCompression & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ZipDeflate"; + /** + * The ZipDeflate compression level. 
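// Illustrative sketch: a TextFormat describing a comma-separated file with a
// header row; every property here may instead be an Expression, per the docs
// above.
const csvFormat: TextFormat = {
  type: "TextFormat",
  columnDelimiter: ",",
  rowDelimiter: "\n",
  quoteChar: '"',
  nullValue: "",
  encodingName: "utf-8",
  firstRowAsHeader: true,
  skipLineCount: 0
};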
+ */ + level?: DatasetCompressionLevel; +}; + +/** + * A WebLinkedService that uses anonymous authentication to communicate with an HTTP endpoint. + */ +export type WebAnonymousAuthentication = WebLinkedServiceTypeProperties & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + authenticationType: "Anonymous"; +}; + +/** + * A WebLinkedService that uses basic authentication to communicate with an HTTP endpoint. + */ +export type WebBasicAuthentication = WebLinkedServiceTypeProperties & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + authenticationType: "Basic"; + /** + * User name for Basic authentication. Type: string (or Expression with resultType string). + */ + username: any; + /** + * The password for Basic authentication. + */ + password: SecretBaseUnion; +}; + +/** + * A WebLinkedService that uses client certificate based authentication to communicate with an HTTP endpoint. This scheme follows mutual authentication; the server must also provide valid credentials to the client. + */ +export type WebClientCertificateAuthentication = WebLinkedServiceTypeProperties & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + authenticationType: "ClientCertificate"; + /** + * Base64-encoded contents of a PFX file. + */ + pfx: SecretBaseUnion; + /** + * Password for the PFX file. + */ + password: SecretBaseUnion; +}; + +/** + * Azure blob read settings. + */ +export type AzureBlobStorageReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobStorageReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Azure blob wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Azure blob wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). + */ + prefix?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Azure blobFS read settings. + */ +export type AzureBlobFSReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobFSReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). 
+ */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Azure data lake store read settings. + */ +export type AzureDataLakeStoreReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataLakeStoreReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * ADLS wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * ADLS wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Amazon S3 read settings. + */ +export type AmazonS3ReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AmazonS3ReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * AmazonS3 wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * The prefix filter for the S3 object name. Type: string (or Expression with resultType string). + */ + prefix?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * File server read settings. + */ +export type FileServerReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "FileServerReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * FileServer wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * FileServer wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Azure File Storage read settings.
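// Illustrative sketch: blob read settings that combine wildcard matching with a
// modified-datetime window; the paths and datetimes are placeholders.
const blobRead: AzureBlobStorageReadSettings = {
  type: "AzureBlobStorageReadSettings",
  recursive: true,
  wildcardFolderPath: "raw/2020/12/*",
  wildcardFileName: "*.csv",
  modifiedDatetimeStart: "2020-12-01T00:00:00Z",
  modifiedDatetimeEnd: "2020-12-02T00:00:00Z"
};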
+ */ +export type AzureFileStorageReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureFileStorageReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Google Cloud Storage read settings. + */ +export type GoogleCloudStorageReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GoogleCloudStorageReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). + */ + prefix?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Ftp read settings. + */ +export type FtpReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "FtpReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Ftp wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Ftp wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Specify whether to use binary transfer mode for FTP stores. + */ + useBinaryTransfer?: boolean; +}; + +/** + * Sftp read settings. + */ +export type SftpReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SftpReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Sftp wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * Sftp wildcardFileName. 
Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; +}; + +/** + * Http read settings. + */ +export type HttpReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HttpReadSettings"; + /** + * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). + */ + requestMethod?: any; + /** + * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). + */ + requestBody?: any; + /** + * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). + */ + additionalHeaders?: any; + /** + * Specifies the timeout for an HTTP client to get an HTTP response from the HTTP server. + */ + requestTimeout?: any; +}; + +/** + * HDFS read settings. + */ +export type HdfsReadSettings = StoreReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HdfsReadSettings"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * HDFS wildcardFolderPath. Type: string (or Expression with resultType string). + */ + wildcardFolderPath?: any; + /** + * HDFS wildcardFileName. Type: string (or Expression with resultType string). + */ + wildcardFileName?: any; + /** + * Indicates whether to enable partition discovery. + */ + enablePartitionDiscovery?: boolean; + /** + * The start of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeStart?: any; + /** + * The end of file's modified datetime. Type: string (or Expression with resultType string). + */ + modifiedDatetimeEnd?: any; + /** + * Specifies Distcp-related settings. + */ + distcpSettings?: DistcpSettings; +}; + +/** + * Sftp write settings. + */ +export type SftpWriteSettings = StoreWriteSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SftpWriteSettings"; + /** + * Specifies the timeout for writing each chunk to SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). + */ + operationTimeout?: any; +}; + +/** + * Azure blob write settings. + */ +export type AzureBlobStorageWriteSettings = StoreWriteSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobStorageWriteSettings"; + /** + * Indicates the block size (MB) when writing data to blob. Type: integer (or Expression with resultType integer). + */ + blockSizeInMB?: any; +}; + +/** + * Azure blobFS write settings. + */ +export type AzureBlobFSWriteSettings = StoreWriteSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobFSWriteSettings"; + /** + * Indicates the block size (MB) when writing data to blob. Type: integer (or Expression with resultType integer). + */ + blockSizeInMB?: any; +}; + +/** + * Azure data lake store write settings.
+ */ +export type AzureDataLakeStoreWriteSettings = StoreWriteSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataLakeStoreWriteSettings"; +}; + +/** + * File server write settings. + */ +export type FileServerWriteSettings = StoreWriteSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "FileServerWriteSettings"; +}; + +/** + * Delimited text read settings. + */ +export type DelimitedTextReadSettings = FormatReadSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DelimitedTextReadSettings"; + /** + * Indicates the number of non-empty rows to skip when reading data from input files. Type: integer (or Expression with resultType integer). + */ + skipLineCount?: any; +}; + +/** + * Avro write settings. + */ +export type AvroWriteSettings = FormatWriteSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AvroWriteSettings"; + /** + * Top level record name in write result, which is required in AVRO spec. + */ + recordName?: string; + /** + * Record namespace in the write result. + */ + recordNamespace?: string; +}; + +/** + * Delimited text write settings. + */ +export type DelimitedTextWriteSettings = FormatWriteSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DelimitedTextWriteSettings"; + /** + * Indicates whether string values should always be enclosed with quotes. Type: boolean (or Expression with resultType boolean). + */ + quoteAllText?: any; + /** + * The file extension used to create the files. Type: string (or Expression with resultType string). + */ + fileExtension: any; +}; + +/** + * Json write settings. + */ +export type JsonWriteSettings = FormatWriteSettings & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "JsonWriteSettings"; + /** + * File pattern of JSON. This setting controls the way a collection of JSON objects will be treated. The default value is 'setOfObjects'. It is case-sensitive. + */ + filePattern?: JsonWriteFilePattern; +}; + +/** + * A copy activity Avro source. + */ +export type AvroSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AvroSource"; + /** + * Avro store settings. + */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * A copy activity Parquet source. + */ +export type ParquetSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ParquetSource"; + /** + * Parquet store settings. + */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * A copy activity DelimitedText source. + */ +export type DelimitedTextSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DelimitedTextSource"; + /** + * DelimitedText store settings. + */ + storeSettings?: StoreReadSettingsUnion; + /** + * DelimitedText format settings. + */ + formatSettings?: DelimitedTextReadSettings; +}; + +/** + * A copy activity Json source. + */ +export type JsonSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "JsonSource"; + /** + * Json store settings. 
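// Illustrative sketch: a DelimitedTextSource composes a StoreReadSettings union
// member (where to read) with DelimitedTextReadSettings (how to parse), echoing
// the split between locations and formats above; values are placeholders.
const csvSource: DelimitedTextSource = {
  type: "DelimitedTextSource",
  storeSettings: {
    type: "AzureBlobStorageReadSettings",
    recursive: true,
    wildcardFileName: "*.csv"
  },
  formatSettings: {
    type: "DelimitedTextReadSettings",
    skipLineCount: 1
  }
};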
+ */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * A copy activity ORC source. + */ +export type OrcSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OrcSource"; + /** + * ORC store settings. + */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * A copy activity Binary source. + */ +export type BinarySource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "BinarySource"; + /** + * Binary store settings. + */ + storeSettings?: StoreReadSettingsUnion; +}; + +/** + * Copy activity sources of tabular type. + */ +export type TabularSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "TabularSource"; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; +}; + +/** + * A copy activity Azure Blob source. + */ +export type BlobSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "BlobSource"; + /** + * Treat empty as null. Type: boolean (or Expression with resultType boolean). + */ + treatEmptyAsNull?: any; + /** + * Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). + */ + skipHeaderLineCount?: any; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; +}; + +/** + * A copy activity Document Database Collection source. + */ +export type DocumentDbCollectionSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DocumentDbCollectionSource"; + /** + * Documents query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * Nested properties separator. Type: string (or Expression with resultType string). + */ + nestingSeparator?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; +}; + +/** + * A copy activity Azure CosmosDB (SQL API) Collection source. + */ +export type CosmosDbSqlApiSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CosmosDbSqlApiSource"; + /** + * SQL API query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * Page size of the result. Type: integer (or Expression with resultType integer). + */ + pageSize?: any; + /** + * Preferred regions. Type: array of strings (or Expression with resultType array of strings). + */ + preferredRegions?: any; +}; + +/** + * A copy activity Dynamics source. + */ +export type DynamicsSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsSource"; + /** + * FetchXML is a proprietary query language that is used in Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Dynamics CRM source. 
+ */ +export type DynamicsCrmSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsCrmSource"; + /** + * FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Common Data Service for Apps source. + */ +export type CommonDataServiceForAppsSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CommonDataServiceForAppsSource"; + /** + * FetchXML is a proprietary query language that is used in Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for various relational databases. + */ +export type RelationalSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "RelationalSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for Microsoft Access. + */ +export type MicrosoftAccessSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MicrosoftAccessSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for OData source. + */ +export type ODataSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ODataSource"; + /** + * OData query. For example, "$top=1". Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Salesforce Service Cloud source. + */ +export type SalesforceServiceCloudSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceServiceCloudSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The read behavior for the operation. Default is Query. + */ + readBehavior?: SalesforceSourceReadBehavior; +}; + +/** + * A copy activity Rest service source. + */ +export type RestSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "RestSource"; + /** + * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression with resultType string). + */ + requestMethod?: any; + /** + * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression with resultType string). + */ + requestBody?: any; + /** + * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression with resultType string). + */ + additionalHeaders?: any; + /** + * The pagination rules to compose next page requests. Type: string (or Expression with resultType string). + */ + paginationRules?: any; + /** + * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the timeout to read response data. Default value: 00:01:40. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ */ + httpRequestTimeout?: any; + /** + * The time to wait before sending the next page request. + */ + requestInterval?: any; +}; + +/** + * A copy activity file system source. + */ +export type FileSystemSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "FileSystemSource"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; +}; + +/** + * A copy activity HDFS source. + */ +export type HdfsSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HdfsSource"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * Specifies Distcp-related settings. + */ + distcpSettings?: DistcpSettings; +}; + +/** + * A copy activity Azure Data Explorer (Kusto) source. + */ +export type AzureDataExplorerSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataExplorerSource"; + /** + * Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with resultType string). + */ + query: any; + /** + * The name of the Boolean option that controls whether truncation is applied to result-sets that go beyond a certain row-count limit. + */ + noTruncation?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; +}; + +/** + * A copy activity Oracle source. + */ +export type OracleSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OracleSource"; + /** + * Oracle reader query. Type: string (or Expression with resultType string). + */ + oracleReaderQuery?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; + /** + * The partition mechanism that will be used for Oracle read in parallel. + */ + partitionOption?: OraclePartitionOption; + /** + * The settings that will be leveraged for Oracle source partitioning. + */ + partitionSettings?: OraclePartitionSettings; +}; + +/** + * A copy activity source for web page table. + */ +export type WebSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "WebSource"; +}; + +/** + * A copy activity source for a MongoDB database. + */ +export type MongoDbSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MongoDbSource"; + /** + * Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for a MongoDB database. + */ +export type MongoDbV2Source = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MongoDbV2Source"; + /** + * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string).
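+ * + * For example (field names are hypothetical): filter: '{ "status": "active", "qty": { "$lt": 30 } }'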
+ */ + filter?: any; + /** + * Cursor methods for MongoDB query. + */ + cursorMethods?: MongoDbCursorMethodsProperties; + /** + * Specifies the number of documents to return in each batch of the response from the MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). + */ + batchSize?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; +}; + +/** + * A copy activity source for a CosmosDB (MongoDB API) database. + */ +export type CosmosDbMongoDbApiSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CosmosDbMongoDbApiSource"; + /** + * Specifies selection filter using query operators. To return all documents in a collection, omit this parameter or pass an empty document ({}). Type: string (or Expression with resultType string). + */ + filter?: any; + /** + * Cursor methods for MongoDB query. + */ + cursorMethods?: MongoDbCursorMethodsProperties; + /** + * Specifies the number of documents to return in each batch of the response from the MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hitting the limitation of response size. Type: integer (or Expression with resultType integer). + */ + batchSize?: any; + /** + * Query timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + queryTimeout?: any; +}; + +/** + * A copy activity source for an Office 365 service. + */ +export type Office365Source = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Office365Source"; + /** + * The groups containing all the users. Type: array of strings (or Expression with resultType array of strings). + */ + allowedGroups?: any; + /** + * The user scope URI. Type: string (or Expression with resultType string). + */ + userScopeFilterUri?: any; + /** + * The column on which to apply the startTime and endTime filters. Type: string (or Expression with resultType string). + */ + dateFilterColumn?: any; + /** + * Start time of the requested range for this dataset. Type: string (or Expression with resultType string). + */ + startTime?: any; + /** + * End time of the requested range for this dataset. Type: string (or Expression with resultType string). + */ + endTime?: any; + /** + * The columns to be read out from the Office 365 table. Type: array of objects (or Expression with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" } ] + */ + outputColumns?: any; +}; + +/** + * A copy activity Azure Data Lake source. + */ +export type AzureDataLakeStoreSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataLakeStoreSource"; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; +}; + +/** + * A copy activity Azure BlobFS source.
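+ * + * Illustrative sketch: + * const blobFsSource: AzureBlobFSSource = { + *   type: "AzureBlobFSSource", + *   recursive: true, + *   skipHeaderLineCount: 1 + * };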
+ */ +export type AzureBlobFSSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobFSSource"; + /** + * Treat empty as null. Type: boolean (or Expression with resultType boolean). + */ + treatEmptyAsNull?: any; + /** + * Number of header lines to skip from each blob. Type: integer (or Expression with resultType integer). + */ + skipHeaderLineCount?: any; + /** + * If true, files under the folder path will be read recursively. Default is true. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; +}; + +/** + * A copy activity source for an HTTP file. + */ +export type HttpSource = CopySource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HttpSource"; + /** + * Specifies the timeout for an HTTP client to get an HTTP response from the HTTP server. The default value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + */ + httpRequestTimeout?: any; +}; + +/** + * A copy activity DelimitedText sink. + */ +export type DelimitedTextSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DelimitedTextSink"; + /** + * DelimitedText store settings. + */ + storeSettings?: StoreWriteSettingsUnion; + /** + * DelimitedText format settings. + */ + formatSettings?: DelimitedTextWriteSettings; +}; + +/** + * A copy activity Json sink. + */ +export type JsonSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "JsonSink"; + /** + * Json store settings. + */ + storeSettings?: StoreWriteSettingsUnion; + /** + * Json format settings. + */ + formatSettings?: JsonWriteSettings; +}; + +/** + * A copy activity ORC sink. + */ +export type OrcSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OrcSink"; + /** + * ORC store settings. + */ + storeSettings?: StoreWriteSettingsUnion; +}; + +/** + * A copy activity Azure PostgreSQL sink. + */ +export type AzurePostgreSqlSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzurePostgreSqlSink"; + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Azure MySql sink. + */ +export type AzureMySqlSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMySqlSink"; + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity SAP Cloud for Customer sink. + */ +export type SapCloudForCustomerSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapCloudForCustomerSink"; + /** + * The write behavior for the operation. Default is 'Insert'. + */ + writeBehavior?: SapCloudForCustomerSinkWriteBehavior; +}; + +/** + * A copy activity Azure Queue sink. + */ +export type AzureQueueSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureQueueSink"; +}; + +/** + * A copy activity Azure Table sink.
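+ * + * Illustrative sketch (key and column names are hypothetical): + * const tableSink: AzureTableSink = { + *   type: "AzureTableSink", + *   azureTablePartitionKeyName: "Region", + *   azureTableRowKeyName: "CustomerId" + * };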
+ */ +export type AzureTableSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureTableSink"; + /** + * Azure Table default partition key value. Type: string (or Expression with resultType string). + */ + azureTableDefaultPartitionKeyValue?: any; + /** + * Azure Table partition key name. Type: string (or Expression with resultType string). + */ + azureTablePartitionKeyName?: any; + /** + * Azure Table row key name. Type: string (or Expression with resultType string). + */ + azureTableRowKeyName?: any; + /** + * Azure Table insert type. Type: string (or Expression with resultType string). + */ + azureTableInsertType?: any; +}; + +/** + * A copy activity Avro sink. + */ +export type AvroSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AvroSink"; + /** + * Avro store settings. + */ + storeSettings?: StoreWriteSettingsUnion; + /** + * Avro format settings. + */ + formatSettings?: AvroWriteSettings; +}; + +/** + * A copy activity Parquet sink. + */ +export type ParquetSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ParquetSink"; + /** + * Parquet store settings. + */ + storeSettings?: StoreWriteSettingsUnion; +}; + +/** + * A copy activity Binary sink. + */ +export type BinarySink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "BinarySink"; + /** + * Binary store settings. + */ + storeSettings?: StoreWriteSettingsUnion; +}; + +/** + * A copy activity Azure Blob sink. + */ +export type BlobSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "BlobSink"; + /** + * Blob writer overwrite files. Type: boolean (or Expression with resultType boolean). + */ + blobWriterOverwriteFiles?: any; + /** + * Blob writer date time format. Type: string (or Expression with resultType string). + */ + blobWriterDateTimeFormat?: any; + /** + * Blob writer add header. Type: boolean (or Expression with resultType boolean). + */ + blobWriterAddHeader?: any; + /** + * The type of copy behavior for copy sink. + */ + copyBehavior?: any; +}; + +/** + * A copy activity file system sink. + */ +export type FileSystemSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "FileSystemSink"; + /** + * The type of copy behavior for copy sink. + */ + copyBehavior?: any; +}; + +/** + * A copy activity Document Database Collection sink. + */ +export type DocumentDbCollectionSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DocumentDbCollectionSink"; + /** + * Nested properties separator. Default is . (dot). Type: string (or Expression with resultType string). + */ + nestingSeparator?: any; + /** + * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). Allowed values: insert and upsert. + */ + writeBehavior?: any; +}; + +/** + * A copy activity Azure CosmosDB (SQL API) Collection sink. + */ +export type CosmosDbSqlApiSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CosmosDbSqlApiSink"; + /** + * Describes how to write data to Azure Cosmos DB. Type: string (or Expression with resultType string). 
Allowed values: insert and upsert. + */ + writeBehavior?: any; +}; + +/** + * A copy activity SQL sink. + */ +export type SqlSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlSink"; + /** + * SQL writer stored procedure name. Type: string (or Expression with resultType string). + */ + sqlWriterStoredProcedureName?: any; + /** + * SQL writer table type. Type: string (or Expression with resultType string). + */ + sqlWriterTableType?: any; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * SQL stored procedure parameters. + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). + */ + storedProcedureTableTypeParameterName?: any; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity SQL server sink. + */ +export type SqlServerSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlServerSink"; + /** + * SQL writer stored procedure name. Type: string (or Expression with resultType string). + */ + sqlWriterStoredProcedureName?: any; + /** + * SQL writer table type. Type: string (or Expression with resultType string). + */ + sqlWriterTableType?: any; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * SQL stored procedure parameters. + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). + */ + storedProcedureTableTypeParameterName?: any; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity Azure SQL sink. + */ +export type AzureSqlSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSqlSink"; + /** + * SQL writer stored procedure name. Type: string (or Expression with resultType string). + */ + sqlWriterStoredProcedureName?: any; + /** + * SQL writer table type. Type: string (or Expression with resultType string). + */ + sqlWriterTableType?: any; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * SQL stored procedure parameters. + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). + */ + storedProcedureTableTypeParameterName?: any; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity Azure SQL Managed Instance sink. 
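+ * + * Illustrative sketch (the procedure name, parameter name and parameter type are hypothetical): + * const sqlMiSink: SqlMISink = { + *   type: "SqlMISink", + *   sqlWriterStoredProcedureName: "spOverwrite", + *   storedProcedureParameters: { identifier: { value: "1", type: "Int" } } + * };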
+ */ +export type SqlMISink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlMISink"; + /** + * SQL writer stored procedure name. Type: string (or Expression with resultType string). + */ + sqlWriterStoredProcedureName?: any; + /** + * SQL writer table type. Type: string (or Expression with resultType string). + */ + sqlWriterTableType?: any; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * SQL stored procedure parameters. + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * The stored procedure parameter name of the table type. Type: string (or Expression with resultType string). + */ + storedProcedureTableTypeParameterName?: any; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity SQL Data Warehouse sink. + */ +export type SqlDWSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlDWSink"; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; + /** + * Indicates whether to use PolyBase to copy data into SQL Data Warehouse when applicable. Type: boolean (or Expression with resultType boolean). + */ + allowPolyBase?: any; + /** + * Specifies PolyBase-related settings when allowPolyBase is true. + */ + polyBaseSettings?: PolybaseSettings; + /** + * Indicates whether to use Copy Command to copy data into SQL Data Warehouse. Type: boolean (or Expression with resultType boolean). + */ + allowCopyCommand?: any; + /** + * Specifies Copy Command-related settings when allowCopyCommand is true. + */ + copyCommandSettings?: DWCopyCommandSettings; + /** + * The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). + */ + tableOption?: any; +}; + +/** + * A copy activity Oracle sink. + */ +export type OracleSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OracleSink"; + /** + * SQL pre-copy script. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Azure Data Lake Store sink. + */ +export type AzureDataLakeStoreSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataLakeStoreSink"; + /** + * The type of copy behavior for copy sink. + */ + copyBehavior?: any; + /** + * Single File Parallel. + */ + enableAdlsSingleFileParallel?: any; +}; + +/** + * A copy activity Azure Data Lake Storage Gen2 sink. + */ +export type AzureBlobFSSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureBlobFSSink"; + /** + * The type of copy behavior for copy sink. + */ + copyBehavior?: any; +}; + +/** + * A copy activity Azure Search Index sink. + */ +export type AzureSearchIndexSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSearchIndexSink"; + /** + * Specify the write behavior when upserting documents into Azure Search Index.
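+ * For example (assuming the AzureSearchIndexWriteBehaviorType union includes "Merge" and "Upload"): writeBehavior: "Merge"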
+ */ + writeBehavior?: AzureSearchIndexWriteBehaviorType; +}; + +/** + * A copy activity ODBC sink. + */ +export type OdbcSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OdbcSink"; + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Informix sink. + */ +export type InformixSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "InformixSink"; + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Microsoft Access sink. + */ +export type MicrosoftAccessSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MicrosoftAccessSink"; + /** + * A query to execute before starting the copy. Type: string (or Expression with resultType string). + */ + preCopyScript?: any; +}; + +/** + * A copy activity Dynamics sink. + */ +export type DynamicsSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsSink"; + /** + * The write behavior for the operation. + */ + writeBehavior: DynamicsSinkWriteBehavior; + /** + * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; + /** + * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). + */ + alternateKeyName?: any; +}; + +/** + * A copy activity Dynamics CRM sink. + */ +export type DynamicsCrmSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsCrmSink"; + /** + * The write behavior for the operation. + */ + writeBehavior: DynamicsSinkWriteBehavior; + /** + * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; + /** + * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). + */ + alternateKeyName?: any; +}; + +/** + * A copy activity Common Data Service for Apps sink. + */ +export type CommonDataServiceForAppsSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CommonDataServiceForAppsSink"; + /** + * The write behavior for the operation. + */ + writeBehavior: DynamicsSinkWriteBehavior; + /** + * The flag indicating whether to ignore null values from input dataset (except key fields) during write operation. Default is false. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; + /** + * The logical name of the alternate key which will be used when upserting records. Type: string (or Expression with resultType string). + */ + alternateKeyName?: any; +}; + +/** + * A copy activity Azure Data Explorer sink.
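+ * + * Illustrative sketch (the mapping name is hypothetical): + * const adxSink: AzureDataExplorerSink = { + *   type: "AzureDataExplorerSink", + *   ingestionMappingName: "csvMapping1", + *   flushImmediately: true + * };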
+ */ +export type AzureDataExplorerSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataExplorerSink"; + /** + * A name of a pre-created csv mapping that was defined on the target Kusto table. Type: string. + */ + ingestionMappingName?: any; + /** + * An explicit column mapping description provided in a json format. Type: string. + */ + ingestionMappingAsJson?: any; + /** + * If set to true, any aggregation will be skipped. Default is false. Type: boolean. + */ + flushImmediately?: any; +}; + +/** + * A copy activity Salesforce sink. + */ +export type SalesforceSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceSink"; + /** + * The write behavior for the operation. Default is Insert. + */ + writeBehavior?: SalesforceSinkWriteBehavior; + /** + * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). + */ + externalIdFieldName?: any; + /** + * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set to true, ADF will leave the data in the destination object unchanged when doing an upsert/update operation and insert the defined default value when doing an insert operation; if set to false, ADF will update the data in the destination object to NULL when doing an upsert/update operation and insert a NULL value when doing an insert operation. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; +}; + +/** + * A copy activity Salesforce Service Cloud sink. + */ +export type SalesforceServiceCloudSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceServiceCloudSink"; + /** + * The write behavior for the operation. Default is Insert. + */ + writeBehavior?: SalesforceSinkWriteBehavior; + /** + * The name of the external ID field for upsert operation. Default value is 'Id' column. Type: string (or Expression with resultType string). + */ + externalIdFieldName?: any; + /** + * The flag indicating whether or not to ignore null values from input dataset (except key fields) during write operation. Default value is false. If set to true, ADF will leave the data in the destination object unchanged when doing an upsert/update operation and insert the defined default value when doing an insert operation; if set to false, ADF will update the data in the destination object to NULL when doing an upsert/update operation and insert a NULL value when doing an insert operation. Type: boolean (or Expression with resultType boolean). + */ + ignoreNullValues?: any; +}; + +/** + * A copy activity sink for a CosmosDB (MongoDB API) database. + */ +export type CosmosDbMongoDbApiSink = CopySink & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CosmosDbMongoDbApiSink"; + /** + * Specifies whether a document with the same key should be overwritten (upsert) rather than throwing an exception (insert). The default value is "insert". Type: string (or Expression with resultType string). + */ + writeBehavior?: any; +}; + +/** + * A copy activity tabular translator.
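+ * + * Illustrative sketch of the mappings form described below (column names are hypothetical): + * const translator: TabularTranslator = { + *   type: "TabularTranslator", + *   mappings: [ + *     { source: { name: "CustomerName", type: "String" }, sink: { name: "ClientName", type: "String" } } + *   ] + * };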
+ */ +export type TabularTranslator = CopyTranslator & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "TabularTranslator"; + /** + * Column mappings. Example: "UserId: MyUserId, Group: MyGroup, Name: MyName" Type: string (or Expression with resultType string). This property will be retired. Please use mappings property. + */ + columnMappings?: any; + /** + * The schema mapping to map between tabular data and hierarchical data. Example: {"Column1": "$.Column1", "Column2": "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or Expression with resultType object). This property will be retired. Please use mappings property. + */ + schemaMapping?: any; + /** + * The JSON Path of the Nested Array that is going to do cross-apply. Type: object (or Expression with resultType object). + */ + collectionReference?: any; + /** + * Whether to map complex (array and object) values to simple strings in json format. Type: boolean (or Expression with resultType boolean). + */ + mapComplexValuesToString?: any; + /** + * Column mappings with logical types. Tabular->tabular example: [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Hierarchical->tabular example: [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. Type: object (or Expression with resultType object). + */ + mappings?: any; +}; + +/** + * Trigger referenced dependency. + */ +export type TriggerDependencyReference = DependencyReference & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "TriggerDependencyReference"; + /** + * Referenced trigger. + */ + referenceTrigger: TriggerReference; +}; + +/** + * Self referenced tumbling window trigger dependency. + */ +export type SelfDependencyTumblingWindowTriggerReference = DependencyReference & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SelfDependencyTumblingWindowTriggerReference"; + /** + * Timespan applied to the start time of a tumbling window when evaluating dependency. + */ + offset: string; + /** + * The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used. + */ + size?: string; +}; + +/** + * The key authorization type integration runtime. + */ +export type LinkedIntegrationRuntimeKeyAuthorization = LinkedIntegrationRuntimeType & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + authorizationType: "Key"; + /** + * The key used for authorization. + */ + key: SecureString; +}; + +/** + * The role based access control (RBAC) authorization type integration runtime. + */ +export type LinkedIntegrationRuntimeRbacAuthorization = LinkedIntegrationRuntimeType & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + authorizationType: "RBAC"; + /** + * The resource identifier of the integration runtime to be shared. + */ + resourceId: string; +}; + +/** + * Linked service resource type. + */ +export type LinkedServiceResource = AzureEntityResource & { + /** + * Properties of linked service. 
+ */ + properties: LinkedServiceUnion; +}; + +/** + * Dataset resource type. + */ +export type DatasetResource = AzureEntityResource & { + /** + * Dataset properties. + */ + properties: DatasetUnion; +}; + +/** + * Pipeline resource type. + */ +export type PipelineResource = AzureEntityResource & { + /** + * Describes unknown properties. The value of an unknown property can be of "any" type. + */ + [property: string]: any; + /** + * The description of the pipeline. + */ + description?: string; + /** + * List of activities in pipeline. + */ + activities?: ActivityUnion[]; + /** + * List of parameters for pipeline. + */ + parameters?: { [propertyName: string]: ParameterSpecification }; + /** + * List of variables for pipeline. + */ + variables?: { [propertyName: string]: VariableSpecification }; + /** + * The max number of concurrent runs for the pipeline. + */ + concurrency?: number; + /** + * List of tags that can be used for describing the Pipeline. + */ + annotations?: any[]; + /** + * Dimensions emitted by Pipeline. + */ + runDimensions?: { [propertyName: string]: any }; + /** + * The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. + */ + folder?: PipelineFolder; +}; + +/** + * Trigger resource type. + */ +export type TriggerResource = AzureEntityResource & { + /** + * Properties of the trigger. + */ + properties: TriggerUnion; +}; + +/** + * Data flow resource type. + */ +export type DataFlowResource = AzureEntityResource & { + /** + * Data flow properties. + */ + properties: DataFlowUnion; +}; + +/** + * Spark job definition resource type. + */ +export type SparkJobDefinitionResource = AzureEntityResource & { + /** + * Properties of spark job definition. + */ + properties: SparkJobDefinition; +}; + +/** + * Integration runtime resource type. + */ +export type IntegrationRuntimeResource = AzureEntityResource & { + /** + * Integration runtime properties. + */ + properties: IntegrationRuntimeUnion; +}; + +/** + * Azure Synapse nested resource, which belongs to a workspace. + */ +export type SubResource = AzureEntityResource & {}; + +/** + * RerunTrigger resource type. + */ +export type RerunTriggerResource = AzureEntityResource & { + /** + * Properties of the rerun trigger. + */ + properties: RerunTumblingWindowTrigger; +}; + +/** + * A workspace + */ +export type Workspace = TrackedResource & { + /** + * Identity of the workspace + */ + identity?: ManagedIdentity; + /** + * Workspace default data lake storage account details + */ + defaultDataLakeStorage?: DataLakeStorageAccountDetails; + /** + * SQL administrator login password + */ + sqlAdministratorLoginPassword?: string; + /** + * Workspace managed resource group. The resource group name uniquely identifies the resource group within the user subscriptionId. The resource group name must be no longer than 90 characters long, and must be alphanumeric characters (Char.IsLetterOrDigit()) and '-', '_', '(', ')' and '.'. Note that the name cannot end with '.' + */ + managedResourceGroupName?: string; + /** + * Resource provisioning state + * NOTE: This property will not be serialized. It can only be populated by the server.
+ */ + readonly provisioningState?: string; + /** + * Login for workspace SQL active directory administrator + */ + sqlAdministratorLogin?: string; + /** + * Virtual Network profile + */ + virtualNetworkProfile?: VirtualNetworkProfile; + /** + * Connectivity endpoints + */ + connectivityEndpoints?: { [propertyName: string]: string }; + /** + * Setting this to 'default' will ensure that all compute for this workspace is in a virtual network managed on behalf of the user. + */ + managedVirtualNetwork?: string; + /** + * Private endpoint connections to the workspace + */ + privateEndpointConnections?: PrivateEndpointConnection[]; + /** + * The encryption details of the workspace + */ + encryption?: EncryptionDetails; + /** + * The workspace unique identifier + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly workspaceUID?: string; + /** + * Workspace level configs and feature flags + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly extraProperties?: { [propertyName: string]: any }; + /** + * Managed Virtual Network Settings + */ + managedVirtualNetworkSettings?: ManagedVirtualNetworkSettings; + /** + * Git integration settings + */ + workspaceRepositoryConfiguration?: WorkspaceRepositoryConfiguration; + /** + * Purview Configuration + */ + purviewConfiguration?: PurviewConfiguration; +}; + +/** + * A SQL Analytics pool + */ +export type SqlPool = TrackedResource & { + /** + * SQL pool SKU + */ + sku?: Sku; + /** + * Maximum size in bytes + */ + maxSizeBytes?: number; + /** + * Collation mode + */ + collation?: string; + /** + * Source database to create from + */ + sourceDatabaseId?: string; + /** + * Backup database to restore from + */ + recoverableDatabaseId?: string; + /** + * Resource state + */ + provisioningState?: string; + /** + * Resource status + */ + status?: string; + /** + * Snapshot time to restore + */ + restorePointInTime?: string; + /** + * Specifies the mode of SQL pool creation. + */ + createMode?: string; + /** + * Date the SQL pool was created + */ + creationDate?: Date; +}; + +/** + * A Big Data pool + */ +export type BigDataPoolResourceInfo = TrackedResource & { + /** + * The state of the Big Data pool. + */ + provisioningState?: string; + /** + * Auto-scaling properties + */ + autoScale?: AutoScaleProperties; + /** + * The time when the Big Data pool was created. + */ + creationDate?: Date; + /** + * Auto-pausing properties + */ + autoPause?: AutoPauseProperties; + /** + * Whether compute isolation is required or not. + */ + isComputeIsolationEnabled?: boolean; + /** + * Whether the library requirements have changed. + */ + haveLibraryRequirementsChanged?: boolean; + /** + * Whether session-level packages are enabled. + */ + sessionLevelPackagesEnabled?: boolean; + /** + * The Spark events folder + */ + sparkEventsFolder?: string; + /** + * The number of nodes in the Big Data pool. + */ + nodeCount?: number; + /** + * Library version requirements + */ + libraryRequirements?: LibraryRequirements; + /** + * Spark configuration file to specify additional properties + */ + sparkConfigProperties?: LibraryRequirements; + /** + * The Apache Spark version. + */ + sparkVersion?: string; + /** + * The default folder where Spark logs will be written. + */ + defaultSparkLogFolder?: string; + /** + * The level of compute power that each node in the Big Data pool has. + */ + nodeSize?: NodeSize; + /** + * The kind of nodes that the Big Data pool provides.
+ */ + nodeSizeFamily?: NodeSizeFamily; +}; + +/** + * Copy activity. + */ +export type CopyActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Copy"; + /** + * List of inputs for the activity. + */ + inputs?: DatasetReference[]; + /** + * List of outputs for the activity. + */ + outputs?: DatasetReference[]; + /** + * Copy activity source. + */ + source: CopySourceUnion; + /** + * Copy activity sink. + */ + sink: CopySinkUnion; + /** + * Copy activity translator. If not specified, tabular translator is used. + */ + translator?: any; + /** + * Specifies whether to copy data via an interim staging. Default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableStaging?: any; + /** + * Specifies interim staging settings when EnableStaging is true. + */ + stagingSettings?: StagingSettings; + /** + * Maximum number of concurrent sessions opened on the source or sink to avoid overloading the data store. Type: integer (or Expression with resultType integer), minimum: 0. + */ + parallelCopies?: any; + /** + * Maximum number of data integration units that can be used to perform this data movement. Type: integer (or Expression with resultType integer), minimum: 0. + */ + dataIntegrationUnits?: any; + /** + * Whether to skip incompatible row. Default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableSkipIncompatibleRow?: any; + /** + * Redirect incompatible row settings when EnableSkipIncompatibleRow is true. + */ + redirectIncompatibleRowSettings?: RedirectIncompatibleRowSettings; + /** + * Preserve Rules. + */ + preserveRules?: any[]; + /** + * Preserve rules. + */ + preserve?: any[]; +}; + +/** + * HDInsight Hive activity type. + */ +export type HDInsightHiveActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HDInsightHive"; + /** + * Storage linked service references. + */ + storageLinkedServices?: LinkedServiceReference[]; + /** + * User specified arguments to HDInsightActivity. + */ + arguments?: any[]; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * Script path. Type: string (or Expression with resultType string). + */ + scriptPath?: any; + /** + * Script linked service reference. + */ + scriptLinkedService?: LinkedServiceReference; + /** + * Allows user to specify defines for Hive job request. + */ + defines?: { [propertyName: string]: any }; + /** + * User specified arguments under hivevar namespace. + */ + variables?: any[]; + /** + * Query timeout value (in minutes). Effective when the HDInsight cluster uses ESP (Enterprise Security Package). + */ + queryTimeout?: number; +}; + +/** + * HDInsight Pig activity type. + */ +export type HDInsightPigActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HDInsightPig"; + /** + * Storage linked service references. + */ + storageLinkedServices?: LinkedServiceReference[]; + /** + * User specified arguments to HDInsightActivity. Type: array (or Expression with resultType array). + */ + arguments?: any; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * Script path. Type: string (or Expression with resultType string). + */ + scriptPath?: any; + /** + * Script linked service reference.
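+ * For example (the reference name is hypothetical): scriptLinkedService: { referenceName: "MyStorageLinkedService", type: "LinkedServiceReference" }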
+ */ + scriptLinkedService?: LinkedServiceReference; + /** + * Allows user to specify defines for Pig job request. + */ + defines?: { [propertyName: string]: any }; +}; + +/** + * HDInsight MapReduce activity type. + */ +export type HDInsightMapReduceActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HDInsightMapReduce"; + /** + * Storage linked service references. + */ + storageLinkedServices?: LinkedServiceReference[]; + /** + * User specified arguments to HDInsightActivity. + */ + arguments?: any[]; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * Class name. Type: string (or Expression with resultType string). + */ + className: any; + /** + * Jar path. Type: string (or Expression with resultType string). + */ + jarFilePath: any; + /** + * Jar linked service reference. + */ + jarLinkedService?: LinkedServiceReference; + /** + * Jar libs. + */ + jarLibs?: any[]; + /** + * Allows user to specify defines for the MapReduce job request. + */ + defines?: { [propertyName: string]: any }; +}; + +/** + * HDInsight streaming activity type. + */ +export type HDInsightStreamingActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HDInsightStreaming"; + /** + * Storage linked service references. + */ + storageLinkedServices?: LinkedServiceReference[]; + /** + * User specified arguments to HDInsightActivity. + */ + arguments?: any[]; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * Mapper executable name. Type: string (or Expression with resultType string). + */ + mapper: any; + /** + * Reducer executable name. Type: string (or Expression with resultType string). + */ + reducer: any; + /** + * Input blob path. Type: string (or Expression with resultType string). + */ + input: any; + /** + * Output blob path. Type: string (or Expression with resultType string). + */ + output: any; + /** + * Paths to streaming job files. Can be directories. + */ + filePaths: any[]; + /** + * Linked service reference where the files are located. + */ + fileLinkedService?: LinkedServiceReference; + /** + * Combiner executable name. Type: string (or Expression with resultType string). + */ + combiner?: any; + /** + * Command line environment values. + */ + commandEnvironment?: any[]; + /** + * Allows user to specify defines for streaming job request. + */ + defines?: { [propertyName: string]: any }; +}; + +/** + * HDInsight Spark activity. + */ +export type HDInsightSparkActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HDInsightSpark"; + /** + * The root path in 'sparkJobLinkedService' for all the job’s files. Type: string (or Expression with resultType string). + */ + rootPath: any; + /** + * The relative path to the root folder of the code/package to be executed. Type: string (or Expression with resultType string). + */ + entryFilePath: any; + /** + * The user-specified arguments to HDInsightSparkActivity. + */ + arguments?: any[]; + /** + * Debug info option. + */ + getDebugInfo?: HDInsightActivityDebugInfoOption; + /** + * The storage linked service for uploading the entry file and dependencies, and for receiving logs. + */ + sparkJobLinkedService?: LinkedServiceReference; + /** + * The application's Java/Spark main class. 
+ */ + className?: string; + /** + * The user to impersonate that will execute the job. Type: string (or Expression with resultType string). + */ + proxyUser?: any; + /** + * Spark configuration property. + */ + sparkConfig?: { [propertyName: string]: any }; +}; + +/** + * Execute SSIS package activity. + */ +export type ExecuteSsisPackageActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ExecuteSSISPackage"; + /** + * SSIS package location. + */ + packageLocation: SsisPackageLocation; + /** + * Specifies the runtime to execute SSIS package. The value should be "x86" or "x64". Type: string (or Expression with resultType string). + */ + runtime?: any; + /** + * The logging level of SSIS package execution. Type: string (or Expression with resultType string). + */ + loggingLevel?: any; + /** + * The environment path to execute the SSIS package. Type: string (or Expression with resultType string). + */ + environmentPath?: any; + /** + * The package execution credential. + */ + executionCredential?: SsisExecutionCredential; + /** + * The integration runtime reference. + */ + connectVia: IntegrationRuntimeReference; + /** + * The project level parameters to execute the SSIS package. + */ + projectParameters?: { [propertyName: string]: SsisExecutionParameter }; + /** + * The package level parameters to execute the SSIS package. + */ + packageParameters?: { [propertyName: string]: SsisExecutionParameter }; + /** + * The project level connection managers to execute the SSIS package. + */ + projectConnectionManagers?: { [propertyName: string]: any }; + /** + * The package level connection managers to execute the SSIS package. + */ + packageConnectionManagers?: { [propertyName: string]: any }; + /** + * The property overrides to execute the SSIS package. + */ + propertyOverrides?: { [propertyName: string]: SsisPropertyOverride }; + /** + * SSIS package execution log location. + */ + logLocation?: SsisLogLocation; +}; + +/** + * Custom activity type. + */ +export type CustomActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Custom"; + /** + * Command for custom activity. Type: string (or Expression with resultType string). + */ + command: any; + /** + * Resource linked service reference. + */ + resourceLinkedService?: LinkedServiceReference; + /** + * Folder path for resource files. Type: string (or Expression with resultType string). + */ + folderPath?: any; + /** + * Reference objects. + */ + referenceObjects?: CustomActivityReferenceObject; + /** + * User defined property bag. There is no restriction on the keys or values that can be used. The user specified custom activity has the full responsibility to consume and interpret the content defined. + */ + extendedProperties?: { [propertyName: string]: any }; + /** + * The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). + */ + retentionTimeInDays?: any; +}; + +/** + * SQL stored procedure activity type. + */ +export type SqlServerStoredProcedureActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlServerStoredProcedure"; + /** + * Stored procedure name. Type: string (or Expression with resultType string). + */ + storedProcedureName: any; + /** + * Value and type setting for stored procedure parameters.
Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; +}; + +/** + * Delete activity. + */ +export type DeleteActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Delete"; + /** + * If true, files or sub-folders under current folder path will be deleted recursively. Default is false. Type: boolean (or Expression with resultType boolean). + */ + recursive?: any; + /** + * The max concurrent connections to connect data source at the same time. + */ + maxConcurrentConnections?: number; + /** + * Whether to record detailed logs of delete-activity execution. Default value is false. Type: boolean (or Expression with resultType boolean). + */ + enableLogging?: any; + /** + * Log storage settings customer need to provide when enableLogging is true. + */ + logStorageSettings?: LogStorageSettings; + /** + * Delete activity dataset reference. + */ + dataset: DatasetReference; +}; + +/** + * Azure Data Explorer command activity. + */ +export type AzureDataExplorerCommandActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureDataExplorerCommand"; + /** + * A control command, according to the Azure Data Explorer command syntax. Type: string (or Expression with resultType string). + */ + command: any; + /** + * Control command timeout. Type: string (or Expression with resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..) + */ + commandTimeout?: any; +}; + +/** + * Lookup activity. + */ +export type LookupActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Lookup"; + /** + * Dataset-specific source properties, same as copy activity source. + */ + source: CopySourceUnion; + /** + * Lookup activity dataset reference. + */ + dataset: DatasetReference; + /** + * Whether to return first row or all rows. Default value is true. Type: boolean (or Expression with resultType boolean). + */ + firstRowOnly?: any; +}; + +/** + * Web activity. + */ +export type WebActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "WebActivity"; + /** + * Rest API method for target endpoint. + */ + method: WebActivityMethod; + /** + * Web activity target endpoint and path. Type: string (or Expression with resultType string). + */ + url: any; + /** + * Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). + */ + headers?: any; + /** + * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). + */ + body?: any; + /** + * Authentication method used for calling the endpoint. + */ + authentication?: WebActivityAuthentication; + /** + * List of datasets passed to web endpoint. + */ + datasets?: DatasetReference[]; + /** + * List of linked services passed to web endpoint. + */ + linkedServices?: LinkedServiceReference[]; + /** + * The integration runtime reference. 
+ */ + connectVia?: IntegrationRuntimeReference; +}; + +/** + * Activity to get metadata of dataset + */ +export type GetMetadataActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GetMetadata"; + /** + * GetMetadata activity dataset reference. + */ + dataset: DatasetReference; + /** + * Fields of metadata to get from dataset. + */ + fieldList?: any[]; +}; + +/** + * Azure ML Batch Execution activity. + */ +export type AzureMLBatchExecutionActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMLBatchExecution"; + /** + * Key,Value pairs to be passed to the Azure ML Batch Execution Service endpoint. Keys must match the names of web service parameters defined in the published Azure ML web service. Values will be passed in the GlobalParameters property of the Azure ML batch execution request. + */ + globalParameters?: { [propertyName: string]: any }; + /** + * Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Outputs to AzureMLWebServiceFile objects specifying the output Blob locations. This information will be passed in the WebServiceOutputs property of the Azure ML batch execution request. + */ + webServiceOutputs?: { [propertyName: string]: AzureMLWebServiceFile }; + /** + * Key,Value pairs, mapping the names of Azure ML endpoint's Web Service Inputs to AzureMLWebServiceFile objects specifying the input Blob locations. This information will be passed in the WebServiceInputs property of the Azure ML batch execution request. + */ + webServiceInputs?: { [propertyName: string]: AzureMLWebServiceFile }; +}; + +/** + * Azure ML Update Resource management activity. + */ +export type AzureMLUpdateResourceActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMLUpdateResource"; + /** + * Name of the Trained Model module in the Web Service experiment to be updated. Type: string (or Expression with resultType string). + */ + trainedModelName: any; + /** + * Name of Azure Storage linked service holding the .ilearner file that will be uploaded by the update operation. + */ + trainedModelLinkedServiceName: LinkedServiceReference; + /** + * The relative file path in trainedModelLinkedService to represent the .ilearner file that will be uploaded by the update operation. Type: string (or Expression with resultType string). + */ + trainedModelFilePath: any; +}; + +/** + * Azure ML Execute Pipeline activity. + */ +export type AzureMLExecutePipelineActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMLExecutePipeline"; + /** + * ID of the published Azure ML pipeline. Type: string (or Expression with resultType string). + */ + mlPipelineId: any; + /** + * Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). + */ + experimentName?: any; + /** + * Key,Value pairs to be passed to the published Azure ML pipeline endpoint. Keys must match the names of pipeline parameters defined in the published pipeline. Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object).
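+ * For example (parameter names are hypothetical): mlPipelineParameters: { learningRate: "0.01", epochs: "10" }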
+ */ + mlPipelineParameters?: any; + /** + * The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). + */ + mlParentRunId?: any; + /** + * Whether to continue execution of other steps in the PipelineRun if a step fails. This information will be passed in the continueOnStepFailure property of the published pipeline execution request. Type: boolean (or Expression with resultType boolean). + */ + continueOnStepFailure?: any; +}; + +/** + * Data Lake Analytics U-SQL activity. + */ +export type DataLakeAnalyticsUsqlActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DataLakeAnalyticsU-SQL"; + /** + * Case-sensitive path to folder that contains the U-SQL script. Type: string (or Expression with resultType string). + */ + scriptPath: any; + /** + * Script linked service reference. + */ + scriptLinkedService: LinkedServiceReference; + /** + * The maximum number of nodes simultaneously used to run the job. Default value is 1. Type: integer (or Expression with resultType integer), minimum: 1. + */ + degreeOfParallelism?: any; + /** + * Determines which jobs out of all that are queued should be selected to run first. The lower the number, the higher the priority. Default value is 1000. Type: integer (or Expression with resultType integer), minimum: 1. + */ + priority?: any; + /** + * Parameters for U-SQL job request. + */ + parameters?: { [propertyName: string]: any }; + /** + * Runtime version of the U-SQL engine to use. Type: string (or Expression with resultType string). + */ + runtimeVersion?: any; + /** + * Compilation mode of U-SQL. Must be one of these values: Semantic, Full and SingleBox. Type: string (or Expression with resultType string). + */ + compilationMode?: any; +}; + +/** + * DatabricksNotebook activity. + */ +export type DatabricksNotebookActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DatabricksNotebook"; + /** + * The absolute path of the notebook to be run in the Databricks Workspace. This path must begin with a slash. Type: string (or Expression with resultType string). + */ + notebookPath: any; + /** + * Base parameters to be used for each run of this job. If the notebook takes a parameter that is not specified, the default value from the notebook will be used. + */ + baseParameters?: { [propertyName: string]: any }; + /** + * A list of libraries to be installed on the cluster that will execute the job. + */ + libraries?: { [propertyName: string]: any }[]; +}; + +/** + * DatabricksSparkJar activity. + */ +export type DatabricksSparkJarActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DatabricksSparkJar"; + /** + * The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. Type: string (or Expression with resultType string). + */ + mainClassName: any; + /** + * Parameters that will be passed to the main method. + */ + parameters?: any[]; + /** + * A list of libraries to be installed on the cluster that will execute the job. + */ + libraries?: { [propertyName: string]: any }[]; +}; + +/** + * DatabricksSparkPython activity.
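+ * + * Illustrative sketch (the activity name, file path and library shape are hypothetical): + * const sparkPythonActivity: DatabricksSparkPythonActivity = { + *   name: "runPiJob", + *   type: "DatabricksSparkPython", + *   pythonFile: "dbfs:/docs/pi.py", + *   parameters: ["10"], + *   libraries: [{ pypi: { package: "simplejson" } }] + * };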
+ */ +export type DatabricksSparkPythonActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DatabricksSparkPython"; + /** + * The URI of the Python file to be executed. DBFS paths are supported. Type: string (or Expression with resultType string). + */ + pythonFile: any; + /** + * Command line parameters that will be passed to the Python file. + */ + parameters?: any[]; + /** + * A list of libraries to be installed on the cluster that will execute the job. + */ + libraries?: { [propertyName: string]: any }[]; +}; + +/** + * Azure Function activity. + */ +export type AzureFunctionActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureFunctionActivity"; + /** + * Rest API method for target endpoint. + */ + method: AzureFunctionActivityMethod; + /** + * Name of the Function that the Azure Function Activity will call. Type: string (or Expression with resultType string) + */ + functionName: any; + /** + * Represents the headers that will be sent to the request. For example, to set the language and type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type": "application/json" }. Type: string (or Expression with resultType string). + */ + headers?: any; + /** + * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not allowed for GET method Type: string (or Expression with resultType string). + */ + body?: any; +}; + +/** + * Execute data flow activity. + */ +export type ExecuteDataFlowActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ExecuteDataFlow"; + /** + * Data flow reference. + */ + dataFlow: DataFlowReference; + /** + * Staging info for execute data flow activity. + */ + staging?: DataFlowStagingInfo; + /** + * The integration runtime reference. + */ + integrationRuntime?: IntegrationRuntimeReference; + /** + * Compute properties for data flow activity. + */ + compute?: ExecuteDataFlowActivityTypePropertiesCompute; +}; + +/** + * Execute Synapse notebook activity. + */ +export type SynapseNotebookActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SynapseNotebook"; + /** + * Synapse notebook reference. + */ + notebook: SynapseNotebookReference; + /** + * Notebook parameters. + */ + parameters?: { [propertyName: string]: any }; +}; + +/** + * Execute spark job activity. + */ +export type SynapseSparkJobDefinitionActivity = ExecutionActivity & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SparkJob"; + /** + * Synapse spark job reference. + */ + sparkJob: SynapseSparkJobReference; +}; + +/** + * Trigger that creates pipeline runs periodically, on schedule. + */ +export type ScheduleTrigger = MultiplePipelineTrigger & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ScheduleTrigger"; + /** + * Recurrence schedule configuration. + */ + recurrence: ScheduleTriggerRecurrence; +}; + +/** + * Trigger that runs every time the selected Blob container changes. 
+ */ +export type BlobTrigger = MultiplePipelineTrigger & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "BlobTrigger"; + /** + * The path of the container/folder that will trigger the pipeline. + */ + folderPath: string; + /** + * The max number of parallel files to handle when it is triggered. + */ + maxConcurrency: number; + /** + * The Azure Storage linked service reference. + */ + linkedService: LinkedServiceReference; +}; + +/** + * Trigger that runs every time a Blob event occurs. + */ +export type BlobEventsTrigger = MultiplePipelineTrigger & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "BlobEventsTrigger"; + /** + * The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + */ + blobPathBeginsWith?: string; + /** + * The blob path must end with the pattern provided for trigger to fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + */ + blobPathEndsWith?: string; + /** + * If set to true, blobs with zero bytes will be ignored. + */ + ignoreEmptyBlobs?: boolean; + /** + * The type of events that cause this trigger to fire. + */ + events: BlobEventType[]; + /** + * The ARM resource ID of the Storage Account. + */ + scope: string; +}; + +/** + * A copy activity Azure Table source. + */ +export type AzureTableSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureTableSource"; + /** + * Azure Table source query. Type: string (or Expression with resultType string). + */ + azureTableSourceQuery?: any; + /** + * Azure Table source ignore table not found. Type: boolean (or Expression with resultType boolean). + */ + azureTableSourceIgnoreTableNotFound?: any; +}; + +/** + * A copy activity source for Informix. + */ +export type InformixSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "InformixSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for Db2 databases. + */ +export type Db2Source = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "Db2Source"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for ODBC databases. + */ +export type OdbcSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OdbcSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for MySQL databases. + */ +export type MySqlSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MySqlSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for PostgreSQL databases. 
+ */ +export type PostgreSqlSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "PostgreSqlSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for Sybase databases. + */ +export type SybaseSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SybaseSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for SapBW server via MDX. + */ +export type SapBwSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapBwSource"; + /** + * MDX query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Salesforce source. + */ +export type SalesforceSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The read behavior for the operation. Default is Query. + */ + readBehavior?: SalesforceSourceReadBehavior; +}; + +/** + * A copy activity source for SAP Cloud for Customer source. + */ +export type SapCloudForCustomerSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapCloudForCustomerSource"; + /** + * SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for SAP ECC source. + */ +export type SapEccSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapEccSource"; + /** + * SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for SAP HANA source. + */ +export type SapHanaSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapHanaSource"; + /** + * SAP HANA Sql query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The packet size of data read from SAP HANA. Type: integer(or Expression with resultType integer). + */ + packetSize?: any; + /** + * The partition mechanism that will be used for SAP HANA read in parallel. + */ + partitionOption?: SapHanaPartitionOption; + /** + * The settings that will be leveraged for SAP HANA source partitioning. + */ + partitionSettings?: SapHanaPartitionSettings; +}; + +/** + * A copy activity source for SAP Business Warehouse Open Hub Destination source. + */ +export type SapOpenHubSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapOpenHubSource"; + /** + * Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). + */ + excludeLastRequest?: any; + /** + * The ID of request for delta loading. Once it is set, only data with requestId larger than the value of this property will be retrieved. The default value is 0. 
Type: integer (or Expression with resultType integer ). + */ + baseRequestId?: any; +}; + +/** + * A copy activity source for SAP Table source. + */ +export type SapTableSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SapTableSource"; + /** + * The number of rows to be retrieved. Type: integer(or Expression with resultType integer). + */ + rowCount?: any; + /** + * The number of rows that will be skipped. Type: integer (or Expression with resultType integer). + */ + rowSkips?: any; + /** + * The fields of the SAP table that will be retrieved. For example, column0, column1. Type: string (or Expression with resultType string). + */ + rfcTableFields?: any; + /** + * The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type: string (or Expression with resultType string). + */ + rfcTableOptions?: any; + /** + * Specifies the maximum number of rows that will be retrieved at a time when retrieving data from SAP Table. Type: integer (or Expression with resultType integer). + */ + batchSize?: any; + /** + * Specifies the custom RFC function module that will be used to read data from SAP Table. Type: string (or Expression with resultType string). + */ + customRfcReadTableFunctionModule?: any; + /** + * The partition mechanism that will be used for SAP table read in parallel. + */ + partitionOption?: SapTablePartitionOption; + /** + * The settings that will be leveraged for SAP table source partitioning. + */ + partitionSettings?: SapTablePartitionSettings; +}; + +/** + * A copy activity SQL source. + */ +export type SqlSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlSource"; + /** + * SQL reader query. Type: string (or Expression with resultType string). + */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; +}; + +/** + * A copy activity SQL server source. + */ +export type SqlServerSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlServerSource"; + /** + * SQL reader query. Type: string (or Expression with resultType string). + */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * Which additional types to produce. + */ + produceAdditionalTypes?: any; +}; + +/** + * A copy activity Azure SQL source. + */ +export type AzureSqlSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureSqlSource"; + /** + * SQL reader query. Type: string (or Expression with resultType string). 
+ */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a SQL Database source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * Which additional types to produce. + */ + produceAdditionalTypes?: any; +}; + +/** + * A copy activity Azure SQL Managed Instance source. + */ +export type SqlMISource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlMISource"; + /** + * SQL reader query. Type: string (or Expression with resultType string). + */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". + */ + storedProcedureParameters?: { + [propertyName: string]: StoredProcedureParameter; + }; + /** + * Which additional types to produce. + */ + produceAdditionalTypes?: any; +}; + +/** + * A copy activity SQL Data Warehouse source. + */ +export type SqlDWSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SqlDWSource"; + /** + * SQL Data Warehouse reader query. Type: string (or Expression with resultType string). + */ + sqlReaderQuery?: any; + /** + * Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same time as SqlReaderQuery. Type: string (or Expression with resultType string). + */ + sqlReaderStoredProcedureName?: any; + /** + * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". Type: object (or Expression with resultType object), itemType: StoredProcedureParameter. + */ + storedProcedureParameters?: any; +}; + +/** + * A copy activity Azure MySQL source. + */ +export type AzureMySqlSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMySqlSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Teradata source. + */ +export type TeradataSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "TeradataSource"; + /** + * Teradata query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The partition mechanism that will be used for teradata read in parallel. + */ + partitionOption?: TeradataPartitionOption; + /** + * The settings that will be leveraged for teradata source partitioning. + */ + partitionSettings?: TeradataPartitionSettings; +}; + +/** + * A copy activity source for a Cassandra database. + */ +export type CassandraSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CassandraSource"; + /** + * Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. 
Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The consistency level specifies how many Cassandra servers must respond to a read request before returning data to the client application. Cassandra checks the specified number of Cassandra servers for data to satisfy the read request. Must be one of cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive. + */ + consistencyLevel?: CassandraSourceReadConsistencyLevels; +}; + +/** + * A copy activity Amazon Marketplace Web Service source. + */ +export type AmazonMWSSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AmazonMWSSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Azure PostgreSQL source. + */ +export type AzurePostgreSqlSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzurePostgreSqlSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Concur Service source. + */ +export type ConcurSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ConcurSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Couchbase server source. + */ +export type CouchbaseSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "CouchbaseSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Drill server source. + */ +export type DrillSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DrillSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Eloqua server source. + */ +export type EloquaSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "EloquaSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Google BigQuery service source. + */ +export type GoogleBigQuerySource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GoogleBigQuerySource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Greenplum Database source. + */ +export type GreenplumSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GreenplumSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity HBase server source. 
+ */ +export type HBaseSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HBaseSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Hive Server source. + */ +export type HiveSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HiveSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Hubspot Service source. + */ +export type HubspotSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "HubspotSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Impala server source. + */ +export type ImpalaSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ImpalaSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Jira Service source. + */ +export type JiraSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "JiraSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Magento server source. + */ +export type MagentoSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MagentoSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity MariaDB server source. + */ +export type MariaDBSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MariaDBSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Azure MariaDB source. + */ +export type AzureMariaDBSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AzureMariaDBSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Marketo server source. + */ +export type MarketoSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "MarketoSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Paypal Service source. + */ +export type PaypalSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "PaypalSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Phoenix server source. 
+ */ +export type PhoenixSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "PhoenixSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Presto server source. + */ +export type PrestoSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "PrestoSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity QuickBooks server source. + */ +export type QuickBooksSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "QuickBooksSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity ServiceNow server source. + */ +export type ServiceNowSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ServiceNowSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Shopify Service source. + */ +export type ShopifySource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ShopifySource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Spark Server source. + */ +export type SparkSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SparkSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Square Service source. + */ +export type SquareSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SquareSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Xero Service source. + */ +export type XeroSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "XeroSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Zoho server source. + */ +export type ZohoSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ZohoSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Netezza source. + */ +export type NetezzaSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "NetezzaSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The partition mechanism that will be used for Netezza read in parallel. 
+ */ + partitionOption?: NetezzaPartitionOption; + /** + * The settings that will be leveraged for Netezza source partitioning. + */ + partitionSettings?: NetezzaPartitionSettings; +}; + +/** + * A copy activity Vertica source. + */ +export type VerticaSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "VerticaSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Salesforce Marketing Cloud source. + */ +export type SalesforceMarketingCloudSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "SalesforceMarketingCloudSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Responsys source. + */ +export type ResponsysSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "ResponsysSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Dynamics AX source. + */ +export type DynamicsAXSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "DynamicsAXSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Oracle Service Cloud source. + */ +export type OracleServiceCloudSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "OracleServiceCloudSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity Google AdWords service source. + */ +export type GoogleAdWordsSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "GoogleAdWordsSource"; + /** + * A query to retrieve data from source. Type: string (or Expression with resultType string). + */ + query?: any; +}; + +/** + * A copy activity source for Amazon Redshift Source. + */ +export type AmazonRedshiftSource = TabularSource & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "AmazonRedshiftSource"; + /** + * Database query. Type: string (or Expression with resultType string). + */ + query?: any; + /** + * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then copied into the targeted sink from the interim S3. + */ + redshiftUnloadSettings?: RedshiftUnloadSettings; +}; + +/** + * Referenced tumbling window trigger dependency. + */ +export type TumblingWindowTriggerDependencyReference = TriggerDependencyReference & { + /** + * Polymorphic discriminator, which specifies the different types this object can be + */ + type: "TumblingWindowTriggerDependencyReference"; + /** + * Timespan applied to the start time of a tumbling window when evaluating dependency. + */ + offset?: string; + /** + * The size of the window when evaluating the dependency. 
If undefined the frequency of the tumbling window will be used. + */ + size?: string; +}; + +/** + * Defines headers for DataFlowDebugSession_createDataFlowDebugSession operation. + */ +export interface DataFlowDebugSessionCreateDataFlowDebugSessionHeaders { + /** + * URI to poll for asynchronous operation status. + */ + location?: string; +} + +/** + * Defines headers for DataFlowDebugSession_executeCommand operation. + */ +export interface DataFlowDebugSessionExecuteCommandHeaders { + /** + * URI to poll for asynchronous operation status. + */ + location?: string; +} + +/** + * Known values of {@link IntegrationRuntimeReferenceType} that the service accepts. + */ +export const enum KnownIntegrationRuntimeReferenceType { + IntegrationRuntimeReference = "IntegrationRuntimeReference" +} + +/** + * Defines values for IntegrationRuntimeReferenceType. \ + * {@link KnownIntegrationRuntimeReferenceType} can be used interchangeably with IntegrationRuntimeReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **IntegrationRuntimeReference** + */ +export type IntegrationRuntimeReferenceType = string; + +/** + * Known values of {@link ParameterType} that the service accepts. + */ +export const enum KnownParameterType { + Object = "Object", + String = "String", + Int = "Int", + Float = "Float", + Bool = "Bool", + Array = "Array", + SecureString = "SecureString" +} + +/** + * Defines values for ParameterType. \ + * {@link KnownParameterType} can be used interchangeably with ParameterType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Object** \ + * **String** \ + * **Int** \ + * **Float** \ + * **Bool** \ + * **Array** \ + * **SecureString** + */ +export type ParameterType = string; + +/** + * Known values of {@link Type} that the service accepts. + */ +export const enum KnownType { + LinkedServiceReference = "LinkedServiceReference" +} + +/** + * Defines values for Type. \ + * {@link KnownType} can be used interchangeably with Type, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **LinkedServiceReference** + */ +export type Type = string; + +/** + * Known values of {@link DependencyCondition} that the service accepts. + */ +export const enum KnownDependencyCondition { + Succeeded = "Succeeded", + Failed = "Failed", + Skipped = "Skipped", + Completed = "Completed" +} + +/** + * Defines values for DependencyCondition. \ + * {@link KnownDependencyCondition} can be used interchangeably with DependencyCondition, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Succeeded** \ + * **Failed** \ + * **Skipped** \ + * **Completed** + */ +export type DependencyCondition = string; + +/** + * Known values of {@link VariableType} that the service accepts. + */ +export const enum KnownVariableType { + String = "String", + Bool = "Bool", + Boolean = "Boolean", + Array = "Array" +} + +/** + * Defines values for VariableType. \ + * {@link KnownVariableType} can be used interchangeably with VariableType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **String** \ + * **Bool** \ + * **Boolean** \ + * **Array** + */ +export type VariableType = string; + +/** + * Known values of {@link RunQueryFilterOperand} that the service accepts. 
+ */ +export const enum KnownRunQueryFilterOperand { + PipelineName = "PipelineName", + Status = "Status", + RunStart = "RunStart", + RunEnd = "RunEnd", + ActivityName = "ActivityName", + ActivityRunStart = "ActivityRunStart", + ActivityRunEnd = "ActivityRunEnd", + ActivityType = "ActivityType", + TriggerName = "TriggerName", + TriggerRunTimestamp = "TriggerRunTimestamp", + RunGroupId = "RunGroupId", + LatestOnly = "LatestOnly" +} + +/** + * Defines values for RunQueryFilterOperand. \ + * {@link KnownRunQueryFilterOperand} can be used interchangeably with RunQueryFilterOperand, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **PipelineName** \ + * **Status** \ + * **RunStart** \ + * **RunEnd** \ + * **ActivityName** \ + * **ActivityRunStart** \ + * **ActivityRunEnd** \ + * **ActivityType** \ + * **TriggerName** \ + * **TriggerRunTimestamp** \ + * **RunGroupId** \ + * **LatestOnly** + */ +export type RunQueryFilterOperand = string; + +/** + * Known values of {@link RunQueryFilterOperator} that the service accepts. + */ +export const enum KnownRunQueryFilterOperator { + Equals = "Equals", + NotEquals = "NotEquals", + In = "In", + NotIn = "NotIn" +} + +/** + * Defines values for RunQueryFilterOperator. \ + * {@link KnownRunQueryFilterOperator} can be used interchangeably with RunQueryFilterOperator, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Equals** \ + * **NotEquals** \ + * **In** \ + * **NotIn** + */ +export type RunQueryFilterOperator = string; + +/** + * Known values of {@link RunQueryOrderByField} that the service accepts. + */ +export const enum KnownRunQueryOrderByField { + RunStart = "RunStart", + RunEnd = "RunEnd", + PipelineName = "PipelineName", + Status = "Status", + ActivityName = "ActivityName", + ActivityRunStart = "ActivityRunStart", + ActivityRunEnd = "ActivityRunEnd", + TriggerName = "TriggerName", + TriggerRunTimestamp = "TriggerRunTimestamp" +} + +/** + * Defines values for RunQueryOrderByField. \ + * {@link KnownRunQueryOrderByField} can be used interchangeably with RunQueryOrderByField, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **RunStart** \ + * **RunEnd** \ + * **PipelineName** \ + * **Status** \ + * **ActivityName** \ + * **ActivityRunStart** \ + * **ActivityRunEnd** \ + * **TriggerName** \ + * **TriggerRunTimestamp** + */ +export type RunQueryOrderByField = string; + +/** + * Known values of {@link RunQueryOrder} that the service accepts. + */ +export const enum KnownRunQueryOrder { + ASC = "ASC", + Desc = "DESC" +} + +/** + * Defines values for RunQueryOrder. \ + * {@link KnownRunQueryOrder} can be used interchangeably with RunQueryOrder, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **ASC** \ + * **DESC** + */ +export type RunQueryOrder = string; + +/** + * Known values of {@link TriggerRuntimeState} that the service accepts. + */ +export const enum KnownTriggerRuntimeState { + Started = "Started", + Stopped = "Stopped", + Disabled = "Disabled" +} + +/** + * Defines values for TriggerRuntimeState. \ + * {@link KnownTriggerRuntimeState} can be used interchangeably with TriggerRuntimeState, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **Started** \ + * **Stopped** \ + * **Disabled** + */ +export type TriggerRuntimeState = string; + +/** + * Known values of {@link EventSubscriptionStatus} that the service accepts. + */ +export const enum KnownEventSubscriptionStatus { + Enabled = "Enabled", + Provisioning = "Provisioning", + Deprovisioning = "Deprovisioning", + Disabled = "Disabled", + Unknown = "Unknown" +} + +/** + * Defines values for EventSubscriptionStatus. \ + * {@link KnownEventSubscriptionStatus} can be used interchangeably with EventSubscriptionStatus, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Enabled** \ + * **Provisioning** \ + * **Deprovisioning** \ + * **Disabled** \ + * **Unknown** + */ +export type EventSubscriptionStatus = string; + +/** + * Known values of {@link TriggerRunStatus} that the service accepts. + */ +export const enum KnownTriggerRunStatus { + Succeeded = "Succeeded", + Failed = "Failed", + Inprogress = "Inprogress" +} + +/** + * Defines values for TriggerRunStatus. \ + * {@link KnownTriggerRunStatus} can be used interchangeably with TriggerRunStatus, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Succeeded** \ + * **Failed** \ + * **Inprogress** + */ +export type TriggerRunStatus = string; + +/** + * Known values of {@link SqlScriptType} that the service accepts. + */ +export const enum KnownSqlScriptType { + SqlQuery = "SqlQuery" +} + +/** + * Defines values for SqlScriptType. \ + * {@link KnownSqlScriptType} can be used interchangeably with SqlScriptType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SqlQuery** + */ +export type SqlScriptType = string; + +/** + * Known values of {@link SqlConnectionType} that the service accepts. + */ +export const enum KnownSqlConnectionType { + SqlOnDemand = "SqlOnDemand", + SqlPool = "SqlPool" +} + +/** + * Defines values for SqlConnectionType. \ + * {@link KnownSqlConnectionType} can be used interchangeably with SqlConnectionType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SqlOnDemand** \ + * **SqlPool** + */ +export type SqlConnectionType = string; + +/** + * Known values of {@link BigDataPoolReferenceType} that the service accepts. + */ +export const enum KnownBigDataPoolReferenceType { + BigDataPoolReference = "BigDataPoolReference" +} + +/** + * Defines values for BigDataPoolReferenceType. \ + * {@link KnownBigDataPoolReferenceType} can be used interchangeably with BigDataPoolReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **BigDataPoolReference** + */ +export type BigDataPoolReferenceType = string; + +/** + * Known values of {@link SparkJobType} that the service accepts. + */ +export const enum KnownSparkJobType { + SparkBatch = "SparkBatch", + SparkSession = "SparkSession" +} + +/** + * Defines values for SparkJobType. \ + * {@link KnownSparkJobType} can be used interchangeably with SparkJobType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SparkBatch** \ + * **SparkSession** + */ +export type SparkJobType = string; + +/** + * Known values of {@link SparkBatchJobResultType} that the service accepts. 
+ */ +export const enum KnownSparkBatchJobResultType { + Uncertain = "Uncertain", + Succeeded = "Succeeded", + Failed = "Failed", + Cancelled = "Cancelled" +} + +/** + * Defines values for SparkBatchJobResultType. \ + * {@link KnownSparkBatchJobResultType} can be used interchangeably with SparkBatchJobResultType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Uncertain** \ + * **Succeeded** \ + * **Failed** \ + * **Cancelled** + */ +export type SparkBatchJobResultType = string; + +/** + * Known values of {@link SchedulerCurrentState} that the service accepts. + */ +export const enum KnownSchedulerCurrentState { + Queued = "Queued", + Scheduled = "Scheduled", + Ended = "Ended" +} + +/** + * Defines values for SchedulerCurrentState. \ + * {@link KnownSchedulerCurrentState} can be used interchangeably with SchedulerCurrentState, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Queued** \ + * **Scheduled** \ + * **Ended** + */ +export type SchedulerCurrentState = string; + +/** + * Known values of {@link PluginCurrentState} that the service accepts. + */ +export const enum KnownPluginCurrentState { + Preparation = "Preparation", + ResourceAcquisition = "ResourceAcquisition", + Queued = "Queued", + Submission = "Submission", + Monitoring = "Monitoring", + Cleanup = "Cleanup", + Ended = "Ended" +} + +/** + * Defines values for PluginCurrentState. \ + * {@link KnownPluginCurrentState} can be used interchangeably with PluginCurrentState, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Preparation** \ + * **ResourceAcquisition** \ + * **Queued** \ + * **Submission** \ + * **Monitoring** \ + * **Cleanup** \ + * **Ended** + */ +export type PluginCurrentState = string; + +/** + * Known values of {@link SparkErrorSource} that the service accepts. + */ +export const enum KnownSparkErrorSource { + System = "System", + User = "User", + Unknown = "Unknown", + Dependency = "Dependency" +} + +/** + * Defines values for SparkErrorSource. \ + * {@link KnownSparkErrorSource} can be used interchangeably with SparkErrorSource, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **System** \ + * **User** \ + * **Unknown** \ + * **Dependency** + */ +export type SparkErrorSource = string; + +/** + * Known values of {@link CellOutputType} that the service accepts. + */ +export const enum KnownCellOutputType { + ExecuteResult = "execute_result", + DisplayData = "display_data", + Stream = "stream", + Error = "error" +} + +/** + * Defines values for CellOutputType. \ + * {@link KnownCellOutputType} can be used interchangeably with CellOutputType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **execute_result** \ + * **display_data** \ + * **stream** \ + * **error** + */ +export type CellOutputType = string; + +/** + * Known values of {@link NodeSize} that the service accepts. + */ +export const enum KnownNodeSize { + None = "None", + Small = "Small", + Medium = "Medium", + Large = "Large", + XLarge = "XLarge", + XXLarge = "XXLarge", + XXXLarge = "XXXLarge" +} + +/** + * Defines values for NodeSize. \ + * {@link KnownNodeSize} can be used interchangeably with NodeSize, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **None** \ + * **Small** \ + * **Medium** \ + * **Large** \ + * **XLarge** \ + * **XXLarge** \ + * **XXXLarge** + */ +export type NodeSize = string; + +/** + * Known values of {@link NodeSizeFamily} that the service accepts. + */ +export const enum KnownNodeSizeFamily { + None = "None", + MemoryOptimized = "MemoryOptimized" +} + +/** + * Defines values for NodeSizeFamily. \ + * {@link KnownNodeSizeFamily} can be used interchangeably with NodeSizeFamily, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **None** \ + * **MemoryOptimized** + */ +export type NodeSizeFamily = string; + +/** + * Known values of {@link IntegrationRuntimeType} that the service accepts. + */ +export const enum KnownIntegrationRuntimeType { + Managed = "Managed", + SelfHosted = "SelfHosted" +} + +/** + * Defines values for IntegrationRuntimeType. \ + * {@link KnownIntegrationRuntimeType} can be used interchangeably with IntegrationRuntimeType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Managed** \ + * **SelfHosted** + */ +export type IntegrationRuntimeType = string; + +/** + * Known values of {@link ExpressionType} that the service accepts. + */ +export const enum KnownExpressionType { + Expression = "Expression" +} + +/** + * Defines values for ExpressionType. \ + * {@link KnownExpressionType} can be used interchangeably with ExpressionType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Expression** + */ +export type ExpressionType = string; + +/** + * Known values of {@link PipelineReferenceType} that the service accepts. + */ +export const enum KnownPipelineReferenceType { + PipelineReference = "PipelineReference" +} + +/** + * Defines values for PipelineReferenceType. \ + * {@link KnownPipelineReferenceType} can be used interchangeably with PipelineReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **PipelineReference** + */ +export type PipelineReferenceType = string; + +/** + * Known values of {@link DatasetReferenceType} that the service accepts. + */ +export const enum KnownDatasetReferenceType { + DatasetReference = "DatasetReference" +} + +/** + * Defines values for DatasetReferenceType. \ + * {@link KnownDatasetReferenceType} can be used interchangeably with DatasetReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **DatasetReference** + */ +export type DatasetReferenceType = string; + +/** + * Known values of {@link DataFlowReferenceType} that the service accepts. + */ +export const enum KnownDataFlowReferenceType { + DataFlowReference = "DataFlowReference" +} + +/** + * Defines values for DataFlowReferenceType. \ + * {@link KnownDataFlowReferenceType} can be used interchangeably with DataFlowReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **DataFlowReference** + */ +export type DataFlowReferenceType = string; + +/** + * Known values of {@link NotebookReferenceType} that the service accepts. + */ +export const enum KnownNotebookReferenceType { + NotebookReference = "NotebookReference" +} + +/** + * Defines values for NotebookReferenceType. 
\ + * {@link KnownNotebookReferenceType} can be used interchangeably with NotebookReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **NotebookReference** + */ +export type NotebookReferenceType = string; + +/** + * Known values of {@link SparkJobReferenceType} that the service accepts. + */ +export const enum KnownSparkJobReferenceType { + SparkJobDefinitionReference = "SparkJobDefinitionReference" +} + +/** + * Defines values for SparkJobReferenceType. \ + * {@link KnownSparkJobReferenceType} can be used interchangeably with SparkJobReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SparkJobDefinitionReference** + */ +export type SparkJobReferenceType = string; + +/** + * Known values of {@link SqlPoolReferenceType} that the service accepts. + */ +export const enum KnownSqlPoolReferenceType { + SqlPoolReference = "SqlPoolReference" +} + +/** + * Defines values for SqlPoolReferenceType. \ + * {@link KnownSqlPoolReferenceType} can be used interchangeably with SqlPoolReferenceType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **SqlPoolReference** + */ +export type SqlPoolReferenceType = string; + +/** + * Known values of {@link JsonFormatFilePattern} that the service accepts. + */ +export const enum KnownJsonFormatFilePattern { + SetOfObjects = "setOfObjects", + ArrayOfObjects = "arrayOfObjects" +} + +/** + * Defines values for JsonFormatFilePattern. \ + * {@link KnownJsonFormatFilePattern} can be used interchangeably with JsonFormatFilePattern, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **setOfObjects** \ + * **arrayOfObjects** + */ +export type JsonFormatFilePattern = string; + +/** + * Known values of {@link DatasetCompressionLevel} that the service accepts. + */ +export const enum KnownDatasetCompressionLevel { + Optimal = "Optimal", + Fastest = "Fastest" +} + +/** + * Defines values for DatasetCompressionLevel. \ + * {@link KnownDatasetCompressionLevel} can be used interchangeably with DatasetCompressionLevel, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Optimal** \ + * **Fastest** + */ +export type DatasetCompressionLevel = string; + +/** + * Known values of {@link AvroCompressionCodec} that the service accepts. + */ +export const enum KnownAvroCompressionCodec { + None = "none", + Deflate = "deflate", + Snappy = "snappy", + Xz = "xz", + Bzip2 = "bzip2" +} + +/** + * Defines values for AvroCompressionCodec. \ + * {@link KnownAvroCompressionCodec} can be used interchangeably with AvroCompressionCodec, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **none** \ + * **deflate** \ + * **snappy** \ + * **xz** \ + * **bzip2** + */ +export type AvroCompressionCodec = string; + +/** + * Known values of {@link ParquetCompressionCodec} that the service accepts. + */ +export const enum KnownParquetCompressionCodec { + None = "none", + Gzip = "gzip", + Snappy = "snappy", + Lzo = "lzo" +} + +/** + * Defines values for ParquetCompressionCodec. \ + * {@link KnownParquetCompressionCodec} can be used interchangeably with ParquetCompressionCodec, + * this enum contains the known values that the service supports. 
+ * ### Know values supported by the service + * **none** \ + * **gzip** \ + * **snappy** \ + * **lzo** + */ +export type ParquetCompressionCodec = string; + +/** + * Known values of {@link DelimitedTextCompressionCodec} that the service accepts. + */ +export const enum KnownDelimitedTextCompressionCodec { + Bzip2 = "bzip2", + Gzip = "gzip", + Deflate = "deflate", + ZipDeflate = "zipDeflate", + Snappy = "snappy", + Lz4 = "lz4" +} + +/** + * Defines values for DelimitedTextCompressionCodec. \ + * {@link KnownDelimitedTextCompressionCodec} can be used interchangeably with DelimitedTextCompressionCodec, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **bzip2** \ + * **gzip** \ + * **deflate** \ + * **zipDeflate** \ + * **snappy** \ + * **lz4** + */ +export type DelimitedTextCompressionCodec = string; + +/** + * Known values of {@link OrcCompressionCodec} that the service accepts. + */ +export const enum KnownOrcCompressionCodec { + None = "none", + Zlib = "zlib", + Snappy = "snappy" +} + +/** + * Defines values for OrcCompressionCodec. \ + * {@link KnownOrcCompressionCodec} can be used interchangeably with OrcCompressionCodec, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **none** \ + * **zlib** \ + * **snappy** + */ +export type OrcCompressionCodec = string; + +/** + * Known values of {@link DynamicsDeploymentType} that the service accepts. + */ +export const enum KnownDynamicsDeploymentType { + Online = "Online", + OnPremisesWithIfd = "OnPremisesWithIfd" +} + +/** + * Defines values for DynamicsDeploymentType. \ + * {@link KnownDynamicsDeploymentType} can be used interchangeably with DynamicsDeploymentType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Online** \ + * **OnPremisesWithIfd** + */ +export type DynamicsDeploymentType = string; + +/** + * Known values of {@link DynamicsAuthenticationType} that the service accepts. + */ +export const enum KnownDynamicsAuthenticationType { + Office365 = "Office365", + Ifd = "Ifd", + AADServicePrincipal = "AADServicePrincipal" +} + +/** + * Defines values for DynamicsAuthenticationType. \ + * {@link KnownDynamicsAuthenticationType} can be used interchangeably with DynamicsAuthenticationType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **Office365** \ + * **Ifd** \ + * **AADServicePrincipal** + */ +export type DynamicsAuthenticationType = string; + +/** + * Known values of {@link DynamicsServicePrincipalCredentialType} that the service accepts. + */ +export const enum KnownDynamicsServicePrincipalCredentialType { + ServicePrincipalKey = "ServicePrincipalKey", + ServicePrincipalCert = "ServicePrincipalCert" +} + +/** + * Defines values for DynamicsServicePrincipalCredentialType. \ + * {@link KnownDynamicsServicePrincipalCredentialType} can be used interchangeably with DynamicsServicePrincipalCredentialType, + * this enum contains the known values that the service supports. + * ### Know values supported by the service + * **ServicePrincipalKey** \ + * **ServicePrincipalCert** + */ +export type DynamicsServicePrincipalCredentialType = string; + +/** + * Known values of {@link SybaseAuthenticationType} that the service accepts. 
+
+/**
+ * Known values of {@link SybaseAuthenticationType} that the service accepts.
+ */
+export const enum KnownSybaseAuthenticationType {
+  Basic = "Basic",
+  Windows = "Windows"
+}
+
+/**
+ * Defines values for SybaseAuthenticationType. \
+ * {@link KnownSybaseAuthenticationType} can be used interchangeably with SybaseAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **Windows**
+ */
+export type SybaseAuthenticationType = string;
+
+/**
+ * Known values of {@link Db2AuthenticationType} that the service accepts.
+ */
+export const enum KnownDb2AuthenticationType {
+  Basic = "Basic"
+}
+
+/**
+ * Defines values for Db2AuthenticationType. \
+ * {@link KnownDb2AuthenticationType} can be used interchangeably with Db2AuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic**
+ */
+export type Db2AuthenticationType = string;
+
+/**
+ * Known values of {@link TeradataAuthenticationType} that the service accepts.
+ */
+export const enum KnownTeradataAuthenticationType {
+  Basic = "Basic",
+  Windows = "Windows"
+}
+
+/**
+ * Defines values for TeradataAuthenticationType. \
+ * {@link KnownTeradataAuthenticationType} can be used interchangeably with TeradataAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **Windows**
+ */
+export type TeradataAuthenticationType = string;
+
+/**
+ * Known values of {@link ODataAuthenticationType} that the service accepts.
+ */
+export const enum KnownODataAuthenticationType {
+  Basic = "Basic",
+  Anonymous = "Anonymous",
+  Windows = "Windows",
+  AadServicePrincipal = "AadServicePrincipal",
+  ManagedServiceIdentity = "ManagedServiceIdentity"
+}
+
+/**
+ * Defines values for ODataAuthenticationType. \
+ * {@link KnownODataAuthenticationType} can be used interchangeably with ODataAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **Anonymous** \
+ * **Windows** \
+ * **AadServicePrincipal** \
+ * **ManagedServiceIdentity**
+ */
+export type ODataAuthenticationType = string;
+
+/**
+ * Known values of {@link ODataAadServicePrincipalCredentialType} that the service accepts.
+ */
+export const enum KnownODataAadServicePrincipalCredentialType {
+  ServicePrincipalKey = "ServicePrincipalKey",
+  ServicePrincipalCert = "ServicePrincipalCert"
+}
+
+/**
+ * Defines values for ODataAadServicePrincipalCredentialType. \
+ * {@link KnownODataAadServicePrincipalCredentialType} can be used interchangeably with ODataAadServicePrincipalCredentialType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **ServicePrincipalKey** \
+ * **ServicePrincipalCert**
+ */
+export type ODataAadServicePrincipalCredentialType = string;
+
+/**
+ * Known values of {@link WebAuthenticationType} that the service accepts.
+ */
+export const enum KnownWebAuthenticationType {
+  Basic = "Basic",
+  Anonymous = "Anonymous",
+  ClientCertificate = "ClientCertificate"
+}
+
+/**
+ * Defines values for WebAuthenticationType. \
+ * {@link KnownWebAuthenticationType} can be used interchangeably with WebAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **Anonymous** \
+ * **ClientCertificate**
+ */
+export type WebAuthenticationType = string;
+
+/**
+ * Known values of {@link MongoDbAuthenticationType} that the service accepts.
+ */
+export const enum KnownMongoDbAuthenticationType {
+  Basic = "Basic",
+  Anonymous = "Anonymous"
+}
+
+/**
+ * Defines values for MongoDbAuthenticationType. \
+ * {@link KnownMongoDbAuthenticationType} can be used interchangeably with MongoDbAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **Anonymous**
+ */
+export type MongoDbAuthenticationType = string;
+
+/**
+ * Known values of {@link RestServiceAuthenticationType} that the service accepts.
+ */
+export const enum KnownRestServiceAuthenticationType {
+  Anonymous = "Anonymous",
+  Basic = "Basic",
+  AadServicePrincipal = "AadServicePrincipal",
+  ManagedServiceIdentity = "ManagedServiceIdentity"
+}
+
+/**
+ * Defines values for RestServiceAuthenticationType. \
+ * {@link KnownRestServiceAuthenticationType} can be used interchangeably with RestServiceAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Anonymous** \
+ * **Basic** \
+ * **AadServicePrincipal** \
+ * **ManagedServiceIdentity**
+ */
+export type RestServiceAuthenticationType = string;
+
+/**
+ * Known values of {@link HttpAuthenticationType} that the service accepts.
+ */
+export const enum KnownHttpAuthenticationType {
+  Basic = "Basic",
+  Anonymous = "Anonymous",
+  Digest = "Digest",
+  Windows = "Windows",
+  ClientCertificate = "ClientCertificate"
+}
+
+/**
+ * Defines values for HttpAuthenticationType. \
+ * {@link KnownHttpAuthenticationType} can be used interchangeably with HttpAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **Anonymous** \
+ * **Digest** \
+ * **Windows** \
+ * **ClientCertificate**
+ */
+export type HttpAuthenticationType = string;
+
+/**
+ * Known values of {@link FtpAuthenticationType} that the service accepts.
+ */
+export const enum KnownFtpAuthenticationType {
+  Basic = "Basic",
+  Anonymous = "Anonymous"
+}
+
+/**
+ * Defines values for FtpAuthenticationType. \
+ * {@link KnownFtpAuthenticationType} can be used interchangeably with FtpAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **Anonymous**
+ */
+export type FtpAuthenticationType = string;
+
+/**
+ * Known values of {@link SftpAuthenticationType} that the service accepts.
+ */
+export const enum KnownSftpAuthenticationType {
+  Basic = "Basic",
+  SshPublicKey = "SshPublicKey"
+}
+
+/**
+ * Defines values for SftpAuthenticationType. \
+ * {@link KnownSftpAuthenticationType} can be used interchangeably with SftpAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **SshPublicKey**
+ */
+export type SftpAuthenticationType = string;
+
+/**
+ * Known values of {@link SapHanaAuthenticationType} that the service accepts.
+ */
+export const enum KnownSapHanaAuthenticationType {
+  Basic = "Basic",
+  Windows = "Windows"
+}
+
+/**
+ * Defines values for SapHanaAuthenticationType. \
+ * {@link KnownSapHanaAuthenticationType} can be used interchangeably with SapHanaAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **Windows**
+ */
+export type SapHanaAuthenticationType = string;
+
+/**
+ * Known values of {@link GoogleBigQueryAuthenticationType} that the service accepts.
+ */
+export const enum KnownGoogleBigQueryAuthenticationType {
+  ServiceAuthentication = "ServiceAuthentication",
+  UserAuthentication = "UserAuthentication"
+}
+
+/**
+ * Defines values for GoogleBigQueryAuthenticationType. \
+ * {@link KnownGoogleBigQueryAuthenticationType} can be used interchangeably with GoogleBigQueryAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **ServiceAuthentication** \
+ * **UserAuthentication**
+ */
+export type GoogleBigQueryAuthenticationType = string;
+
+/**
+ * Known values of {@link HBaseAuthenticationType} that the service accepts.
+ */
+export const enum KnownHBaseAuthenticationType {
+  Anonymous = "Anonymous",
+  Basic = "Basic"
+}
+
+/**
+ * Defines values for HBaseAuthenticationType. \
+ * {@link KnownHBaseAuthenticationType} can be used interchangeably with HBaseAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Anonymous** \
+ * **Basic**
+ */
+export type HBaseAuthenticationType = string;
+
+/**
+ * Known values of {@link HiveServerType} that the service accepts.
+ */
+export const enum KnownHiveServerType {
+  HiveServer1 = "HiveServer1",
+  HiveServer2 = "HiveServer2",
+  HiveThriftServer = "HiveThriftServer"
+}
+
+/**
+ * Defines values for HiveServerType. \
+ * {@link KnownHiveServerType} can be used interchangeably with HiveServerType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **HiveServer1** \
+ * **HiveServer2** \
+ * **HiveThriftServer**
+ */
+export type HiveServerType = string;
+
+/**
+ * Known values of {@link HiveThriftTransportProtocol} that the service accepts.
+ */
+export const enum KnownHiveThriftTransportProtocol {
+  Binary = "Binary",
+  Sasl = "SASL",
+  Http = "HTTP "
+}
+
+/**
+ * Defines values for HiveThriftTransportProtocol. \
+ * {@link KnownHiveThriftTransportProtocol} can be used interchangeably with HiveThriftTransportProtocol,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Binary** \
+ * **SASL** \
+ * **HTTP **
+ */
+export type HiveThriftTransportProtocol = string;
+
+/**
+ * Known values of {@link HiveAuthenticationType} that the service accepts.
+ */
+export const enum KnownHiveAuthenticationType {
+  Anonymous = "Anonymous",
+  Username = "Username",
+  UsernameAndPassword = "UsernameAndPassword",
+  WindowsAzureHDInsightService = "WindowsAzureHDInsightService"
+}
+
+/**
+ * Defines values for HiveAuthenticationType. \
+ * {@link KnownHiveAuthenticationType} can be used interchangeably with HiveAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Anonymous** \
+ * **Username** \
+ * **UsernameAndPassword** \
+ * **WindowsAzureHDInsightService**
+ */
+export type HiveAuthenticationType = string;
+
+/**
+ * Known values of {@link ImpalaAuthenticationType} that the service accepts.
+ */
+export const enum KnownImpalaAuthenticationType {
+  Anonymous = "Anonymous",
+  SaslUsername = "SASLUsername",
+  UsernameAndPassword = "UsernameAndPassword"
+}
+
+/**
+ * Defines values for ImpalaAuthenticationType. \
+ * {@link KnownImpalaAuthenticationType} can be used interchangeably with ImpalaAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Anonymous** \
+ * **SASLUsername** \
+ * **UsernameAndPassword**
+ */
+export type ImpalaAuthenticationType = string;
+
+/**
+ * Known values of {@link PhoenixAuthenticationType} that the service accepts.
+ */
+export const enum KnownPhoenixAuthenticationType {
+  Anonymous = "Anonymous",
+  UsernameAndPassword = "UsernameAndPassword",
+  WindowsAzureHDInsightService = "WindowsAzureHDInsightService"
+}
+
+/**
+ * Defines values for PhoenixAuthenticationType. \
+ * {@link KnownPhoenixAuthenticationType} can be used interchangeably with PhoenixAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Anonymous** \
+ * **UsernameAndPassword** \
+ * **WindowsAzureHDInsightService**
+ */
+export type PhoenixAuthenticationType = string;
+
+/**
+ * Known values of {@link PrestoAuthenticationType} that the service accepts.
+ */
+export const enum KnownPrestoAuthenticationType {
+  Anonymous = "Anonymous",
+  Ldap = "LDAP"
+}
+
+/**
+ * Defines values for PrestoAuthenticationType. \
+ * {@link KnownPrestoAuthenticationType} can be used interchangeably with PrestoAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Anonymous** \
+ * **LDAP**
+ */
+export type PrestoAuthenticationType = string;
+
+/**
+ * Known values of {@link ServiceNowAuthenticationType} that the service accepts.
+ */
+export const enum KnownServiceNowAuthenticationType {
+  Basic = "Basic",
+  OAuth2 = "OAuth2"
+}
+
+/**
+ * Defines values for ServiceNowAuthenticationType. \
+ * {@link KnownServiceNowAuthenticationType} can be used interchangeably with ServiceNowAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **OAuth2**
+ */
+export type ServiceNowAuthenticationType = string;
+
+/**
+ * Known values of {@link SparkServerType} that the service accepts.
+ */
+export const enum KnownSparkServerType {
+  SharkServer = "SharkServer",
+  SharkServer2 = "SharkServer2",
+  SparkThriftServer = "SparkThriftServer"
+}
+
+/**
+ * Defines values for SparkServerType. \
+ * {@link KnownSparkServerType} can be used interchangeably with SparkServerType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **SharkServer** \
+ * **SharkServer2** \
+ * **SparkThriftServer**
+ */
+export type SparkServerType = string;
+
+/**
+ * Known values of {@link SparkThriftTransportProtocol} that the service accepts.
+ */
+export const enum KnownSparkThriftTransportProtocol {
+  Binary = "Binary",
+  Sasl = "SASL",
+  Http = "HTTP "
+}
+
+/**
+ * Defines values for SparkThriftTransportProtocol. \
+ * {@link KnownSparkThriftTransportProtocol} can be used interchangeably with SparkThriftTransportProtocol,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Binary** \
+ * **SASL** \
+ * **HTTP **
+ */
+export type SparkThriftTransportProtocol = string;
+
+/**
+ * Known values of {@link SparkAuthenticationType} that the service accepts.
+ */
+export const enum KnownSparkAuthenticationType {
+  Anonymous = "Anonymous",
+  Username = "Username",
+  UsernameAndPassword = "UsernameAndPassword",
+  WindowsAzureHDInsightService = "WindowsAzureHDInsightService"
+}
+
+/**
+ * Defines values for SparkAuthenticationType. \
+ * {@link KnownSparkAuthenticationType} can be used interchangeably with SparkAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Anonymous** \
+ * **Username** \
+ * **UsernameAndPassword** \
+ * **WindowsAzureHDInsightService**
+ */
+export type SparkAuthenticationType = string;
+
+/**
+ * Known values of {@link HdiNodeTypes} that the service accepts.
+ */
+export const enum KnownHdiNodeTypes {
+  Headnode = "Headnode",
+  Workernode = "Workernode",
+  Zookeeper = "Zookeeper"
+}
+
+/**
+ * Defines values for HdiNodeTypes. \
+ * {@link KnownHdiNodeTypes} can be used interchangeably with HdiNodeTypes,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Headnode** \
+ * **Workernode** \
+ * **Zookeeper**
+ */
+export type HdiNodeTypes = string;
+
+/**
+ * Known values of {@link GoogleAdWordsAuthenticationType} that the service accepts.
+ */
+export const enum KnownGoogleAdWordsAuthenticationType {
+  ServiceAuthentication = "ServiceAuthentication",
+  UserAuthentication = "UserAuthentication"
+}
+
+/**
+ * Defines values for GoogleAdWordsAuthenticationType. \
+ * {@link KnownGoogleAdWordsAuthenticationType} can be used interchangeably with GoogleAdWordsAuthenticationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **ServiceAuthentication** \
+ * **UserAuthentication**
+ */
+export type GoogleAdWordsAuthenticationType = string;
+
+/**
+ * Known values of {@link JsonWriteFilePattern} that the service accepts.
+ */
+export const enum KnownJsonWriteFilePattern {
+  SetOfObjects = "setOfObjects",
+  ArrayOfObjects = "arrayOfObjects"
+}
+
+/**
+ * Defines values for JsonWriteFilePattern. \
+ * {@link KnownJsonWriteFilePattern} can be used interchangeably with JsonWriteFilePattern,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **setOfObjects** \
+ * **arrayOfObjects**
+ */
+export type JsonWriteFilePattern = string;
+
+/**
+ * Known values of {@link SalesforceSourceReadBehavior} that the service accepts.
+ */
+export const enum KnownSalesforceSourceReadBehavior {
+  Query = "Query",
+  QueryAll = "QueryAll"
+}
+
+/**
+ * Defines values for SalesforceSourceReadBehavior. \
+ * {@link KnownSalesforceSourceReadBehavior} can be used interchangeably with SalesforceSourceReadBehavior,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Query** \
+ * **QueryAll**
+ */
+export type SalesforceSourceReadBehavior = string;
+
+/**
+ * Known values of {@link SapHanaPartitionOption} that the service accepts.
+ */
+export const enum KnownSapHanaPartitionOption {
+  None = "None",
+  PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable",
+  SapHanaDynamicRange = "SapHanaDynamicRange"
+}
+
+/**
+ * Defines values for SapHanaPartitionOption. \
+ * {@link KnownSapHanaPartitionOption} can be used interchangeably with SapHanaPartitionOption,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **None** \
+ * **PhysicalPartitionsOfTable** \
+ * **SapHanaDynamicRange**
+ */
+export type SapHanaPartitionOption = string;
+
+/**
+ * Known values of {@link SapTablePartitionOption} that the service accepts.
+ */
+export const enum KnownSapTablePartitionOption {
+  None = "None",
+  PartitionOnInt = "PartitionOnInt",
+  PartitionOnCalendarYear = "PartitionOnCalendarYear",
+  PartitionOnCalendarMonth = "PartitionOnCalendarMonth",
+  PartitionOnCalendarDate = "PartitionOnCalendarDate",
+  PartitionOnTime = "PartitionOnTime"
+}
+
+/**
+ * Defines values for SapTablePartitionOption. \
+ * {@link KnownSapTablePartitionOption} can be used interchangeably with SapTablePartitionOption,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **None** \
+ * **PartitionOnInt** \
+ * **PartitionOnCalendarYear** \
+ * **PartitionOnCalendarMonth** \
+ * **PartitionOnCalendarDate** \
+ * **PartitionOnTime**
+ */
+export type SapTablePartitionOption = string;
+
+/**
+ * Known values of {@link StoredProcedureParameterType} that the service accepts.
+ */
+export const enum KnownStoredProcedureParameterType {
+  String = "String",
+  Int = "Int",
+  Int64 = "Int64",
+  Decimal = "Decimal",
+  Guid = "Guid",
+  Boolean = "Boolean",
+  Date = "Date"
+}
+
+/**
+ * Defines values for StoredProcedureParameterType. \
+ * {@link KnownStoredProcedureParameterType} can be used interchangeably with StoredProcedureParameterType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **String** \
+ * **Int** \
+ * **Int64** \
+ * **Decimal** \
+ * **Guid** \
+ * **Boolean** \
+ * **Date**
+ */
+export type StoredProcedureParameterType = string;
+
+/**
+ * Known values of {@link OraclePartitionOption} that the service accepts.
+ */
+export const enum KnownOraclePartitionOption {
+  None = "None",
+  PhysicalPartitionsOfTable = "PhysicalPartitionsOfTable",
+  DynamicRange = "DynamicRange"
+}
+
+/**
+ * Defines values for OraclePartitionOption. \
+ * {@link KnownOraclePartitionOption} can be used interchangeably with OraclePartitionOption,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **None** \
+ * **PhysicalPartitionsOfTable** \
+ * **DynamicRange**
+ */
+export type OraclePartitionOption = string;
+
+/**
+ * Known values of {@link TeradataPartitionOption} that the service accepts.
+ */
+export const enum KnownTeradataPartitionOption {
+  None = "None",
+  Hash = "Hash",
+  DynamicRange = "DynamicRange"
+}
+
+/**
+ * Defines values for TeradataPartitionOption. \
+ * {@link KnownTeradataPartitionOption} can be used interchangeably with TeradataPartitionOption,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **None** \
+ * **Hash** \
+ * **DynamicRange**
+ */
+export type TeradataPartitionOption = string;
+
+/**
+ * Known values of {@link CassandraSourceReadConsistencyLevels} that the service accepts.
+ */
+export const enum KnownCassandraSourceReadConsistencyLevels {
+  ALL = "ALL",
+  EachQuorum = "EACH_QUORUM",
+  Quorum = "QUORUM",
+  LocalQuorum = "LOCAL_QUORUM",
+  ONE = "ONE",
+  TWO = "TWO",
+  Three = "THREE",
+  LocalONE = "LOCAL_ONE",
+  Serial = "SERIAL",
+  LocalSerial = "LOCAL_SERIAL"
+}
+
+/**
+ * Defines values for CassandraSourceReadConsistencyLevels. \
+ * {@link KnownCassandraSourceReadConsistencyLevels} can be used interchangeably with CassandraSourceReadConsistencyLevels,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **ALL** \
+ * **EACH_QUORUM** \
+ * **QUORUM** \
+ * **LOCAL_QUORUM** \
+ * **ONE** \
+ * **TWO** \
+ * **THREE** \
+ * **LOCAL_ONE** \
+ * **SERIAL** \
+ * **LOCAL_SERIAL**
+ */
+export type CassandraSourceReadConsistencyLevels = string;
+
+/**
+ * Known values of {@link NetezzaPartitionOption} that the service accepts.
+ */
+export const enum KnownNetezzaPartitionOption {
+  None = "None",
+  DataSlice = "DataSlice",
+  DynamicRange = "DynamicRange"
+}
+
+/**
+ * Defines values for NetezzaPartitionOption. \
+ * {@link KnownNetezzaPartitionOption} can be used interchangeably with NetezzaPartitionOption,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **None** \
+ * **DataSlice** \
+ * **DynamicRange**
+ */
+export type NetezzaPartitionOption = string;
+
+/**
+ * Known values of {@link SapCloudForCustomerSinkWriteBehavior} that the service accepts.
+ */
+export const enum KnownSapCloudForCustomerSinkWriteBehavior {
+  Insert = "Insert",
+  Update = "Update"
+}
+
+/**
+ * Defines values for SapCloudForCustomerSinkWriteBehavior. \
+ * {@link KnownSapCloudForCustomerSinkWriteBehavior} can be used interchangeably with SapCloudForCustomerSinkWriteBehavior,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Insert** \
+ * **Update**
+ */
+export type SapCloudForCustomerSinkWriteBehavior = string;
+
+/**
+ * Known values of {@link PolybaseSettingsRejectType} that the service accepts.
+ */
+export const enum KnownPolybaseSettingsRejectType {
+  Value = "value",
+  Percentage = "percentage"
+}
+
+/**
+ * Defines values for PolybaseSettingsRejectType. \
+ * {@link KnownPolybaseSettingsRejectType} can be used interchangeably with PolybaseSettingsRejectType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **value** \
+ * **percentage**
+ */
+export type PolybaseSettingsRejectType = string;
+
+/**
+ * Known values of {@link AzureSearchIndexWriteBehaviorType} that the service accepts.
+ */
+export const enum KnownAzureSearchIndexWriteBehaviorType {
+  Merge = "Merge",
+  Upload = "Upload"
+}
+
+/**
+ * Defines values for AzureSearchIndexWriteBehaviorType. \
+ * {@link KnownAzureSearchIndexWriteBehaviorType} can be used interchangeably with AzureSearchIndexWriteBehaviorType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Merge** \
+ * **Upload**
+ */
+export type AzureSearchIndexWriteBehaviorType = string;
+
+/**
+ * Known values of {@link DynamicsSinkWriteBehavior} that the service accepts.
+ */
+export const enum KnownDynamicsSinkWriteBehavior {
+  Upsert = "Upsert"
+}
+
+/**
+ * Defines values for DynamicsSinkWriteBehavior. \
+ * {@link KnownDynamicsSinkWriteBehavior} can be used interchangeably with DynamicsSinkWriteBehavior,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Upsert**
+ */
+export type DynamicsSinkWriteBehavior = string;
+
+/**
+ * Known values of {@link SalesforceSinkWriteBehavior} that the service accepts.
+ */
+export const enum KnownSalesforceSinkWriteBehavior {
+  Insert = "Insert",
+  Upsert = "Upsert"
+}
+
+/**
+ * Defines values for SalesforceSinkWriteBehavior. \
+ * {@link KnownSalesforceSinkWriteBehavior} can be used interchangeably with SalesforceSinkWriteBehavior,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Insert** \
+ * **Upsert**
+ */
+export type SalesforceSinkWriteBehavior = string;
+
+/**
+ * Known values of {@link HDInsightActivityDebugInfoOption} that the service accepts.
+ */
+export const enum KnownHDInsightActivityDebugInfoOption {
+  None = "None",
+  Always = "Always",
+  Failure = "Failure"
+}
+
+/**
+ * Defines values for HDInsightActivityDebugInfoOption. \
+ * {@link KnownHDInsightActivityDebugInfoOption} can be used interchangeably with HDInsightActivityDebugInfoOption,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **None** \
+ * **Always** \
+ * **Failure**
+ */
+export type HDInsightActivityDebugInfoOption = string;
+
+/**
+ * Known values of {@link SsisPackageLocationType} that the service accepts.
+ */
+export const enum KnownSsisPackageLocationType {
+  Ssisdb = "SSISDB",
+  File = "File",
+  InlinePackage = "InlinePackage"
+}
+
+/**
+ * Defines values for SsisPackageLocationType. \
+ * {@link KnownSsisPackageLocationType} can be used interchangeably with SsisPackageLocationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **SSISDB** \
+ * **File** \
+ * **InlinePackage**
+ */
+export type SsisPackageLocationType = string;
+
+/**
+ * Known values of {@link SsisLogLocationType} that the service accepts.
+ */
+export const enum KnownSsisLogLocationType {
+  File = "File"
+}
+
+/**
+ * Defines values for SsisLogLocationType. \
+ * {@link KnownSsisLogLocationType} can be used interchangeably with SsisLogLocationType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **File**
+ */
+export type SsisLogLocationType = string;
+
+/**
+ * Known values of {@link WebActivityMethod} that the service accepts.
+ */
+export const enum KnownWebActivityMethod {
+  GET = "GET",
+  Post = "POST",
+  PUT = "PUT",
+  Delete = "DELETE"
+}
+
+/**
+ * Defines values for WebActivityMethod. \
+ * {@link KnownWebActivityMethod} can be used interchangeably with WebActivityMethod,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **GET** \
+ * **POST** \
+ * **PUT** \
+ * **DELETE**
+ */
+export type WebActivityMethod = string;
+
+/**
+ * Known values of {@link AzureFunctionActivityMethod} that the service accepts.
+ */
+export const enum KnownAzureFunctionActivityMethod {
+  GET = "GET",
+  Post = "POST",
+  PUT = "PUT",
+  Delete = "DELETE",
+  Options = "OPTIONS",
+  Head = "HEAD",
+  Trace = "TRACE"
+}
+
+/**
+ * Defines values for AzureFunctionActivityMethod. \
+ * {@link KnownAzureFunctionActivityMethod} can be used interchangeably with AzureFunctionActivityMethod,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **GET** \
+ * **POST** \
+ * **PUT** \
+ * **DELETE** \
+ * **OPTIONS** \
+ * **HEAD** \
+ * **TRACE**
+ */
+export type AzureFunctionActivityMethod = string;
+
+/**
+ * Known values of {@link WebHookActivityMethod} that the service accepts.
+ */
+export const enum KnownWebHookActivityMethod {
+  Post = "POST"
+}
+
+/**
+ * Defines values for WebHookActivityMethod. \
+ * {@link KnownWebHookActivityMethod} can be used interchangeably with WebHookActivityMethod,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **POST**
+ */
+export type WebHookActivityMethod = string;
+
+/**
+ * Known values of {@link DataFlowComputeType} that the service accepts.
+ */
+export const enum KnownDataFlowComputeType {
+  General = "General",
+  MemoryOptimized = "MemoryOptimized",
+  ComputeOptimized = "ComputeOptimized"
+}
+
+/**
+ * Defines values for DataFlowComputeType. \
+ * {@link KnownDataFlowComputeType} can be used interchangeably with DataFlowComputeType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **General** \
+ * **MemoryOptimized** \
+ * **ComputeOptimized**
+ */
+export type DataFlowComputeType = string;
+
+/**
+ * Known values of {@link RecurrenceFrequency} that the service accepts.
+ */
+export const enum KnownRecurrenceFrequency {
+  NotSpecified = "NotSpecified",
+  Minute = "Minute",
+  Hour = "Hour",
+  Day = "Day",
+  Week = "Week",
+  Month = "Month",
+  Year = "Year"
+}
+
+/**
+ * Defines values for RecurrenceFrequency. \
+ * {@link KnownRecurrenceFrequency} can be used interchangeably with RecurrenceFrequency,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **NotSpecified** \
+ * **Minute** \
+ * **Hour** \
+ * **Day** \
+ * **Week** \
+ * **Month** \
+ * **Year**
+ */
+export type RecurrenceFrequency = string;
+
+/**
+ * Known values of {@link BlobEventType} that the service accepts.
+ */
+export const enum KnownBlobEventType {
+  MicrosoftStorageBlobCreated = "Microsoft.Storage.BlobCreated",
+  MicrosoftStorageBlobDeleted = "Microsoft.Storage.BlobDeleted"
+}
+
+/**
+ * Defines values for BlobEventType. \
+ * {@link KnownBlobEventType} can be used interchangeably with BlobEventType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Microsoft.Storage.BlobCreated** \
+ * **Microsoft.Storage.BlobDeleted**
+ */
+export type BlobEventType = string;
+
+/**
+ * Known values of {@link TumblingWindowFrequency} that the service accepts.
+ */
+export const enum KnownTumblingWindowFrequency {
+  Minute = "Minute",
+  Hour = "Hour"
+}
+
+/**
+ * Defines values for TumblingWindowFrequency. \
+ * {@link KnownTumblingWindowFrequency} can be used interchangeably with TumblingWindowFrequency,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Minute** \
+ * **Hour**
+ */
+export type TumblingWindowFrequency = string;
+
+/**
+ * Known values of {@link TriggerReferenceType} that the service accepts.
+ */
+export const enum KnownTriggerReferenceType {
+  TriggerReference = "TriggerReference"
+}
+
+/**
+ * Defines values for TriggerReferenceType. \
+ * {@link KnownTriggerReferenceType} can be used interchangeably with TriggerReferenceType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **TriggerReference**
+ */
+export type TriggerReferenceType = string;
+
+/**
+ * Known values of {@link IntegrationRuntimeState} that the service accepts.
+ */
+export const enum KnownIntegrationRuntimeState {
+  Initial = "Initial",
+  Stopped = "Stopped",
+  Started = "Started",
+  Starting = "Starting",
+  Stopping = "Stopping",
+  NeedRegistration = "NeedRegistration",
+  Online = "Online",
+  Limited = "Limited",
+  Offline = "Offline",
+  AccessDenied = "AccessDenied"
+}
+
+/**
+ * Defines values for IntegrationRuntimeState. \
+ * {@link KnownIntegrationRuntimeState} can be used interchangeably with IntegrationRuntimeState,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Initial** \
+ * **Stopped** \
+ * **Started** \
+ * **Starting** \
+ * **Stopping** \
+ * **NeedRegistration** \
+ * **Online** \
+ * **Limited** \
+ * **Offline** \
+ * **AccessDenied**
+ */
+export type IntegrationRuntimeState = string;
+
+/**
+ * Known values of {@link IntegrationRuntimeSsisCatalogPricingTier} that the service accepts.
+ */
+export const enum KnownIntegrationRuntimeSsisCatalogPricingTier {
+  Basic = "Basic",
+  Standard = "Standard",
+  Premium = "Premium",
+  PremiumRS = "PremiumRS"
+}
+
+/**
+ * Defines values for IntegrationRuntimeSsisCatalogPricingTier. \
+ * {@link KnownIntegrationRuntimeSsisCatalogPricingTier} can be used interchangeably with IntegrationRuntimeSsisCatalogPricingTier,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Basic** \
+ * **Standard** \
+ * **Premium** \
+ * **PremiumRS**
+ */
+export type IntegrationRuntimeSsisCatalogPricingTier = string;
+
+/**
+ * Known values of {@link IntegrationRuntimeLicenseType} that the service accepts.
+ */
+export const enum KnownIntegrationRuntimeLicenseType {
+  BasePrice = "BasePrice",
+  LicenseIncluded = "LicenseIncluded"
+}
+
+/**
+ * Defines values for IntegrationRuntimeLicenseType. \
+ * {@link KnownIntegrationRuntimeLicenseType} can be used interchangeably with IntegrationRuntimeLicenseType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **BasePrice** \
+ * **LicenseIncluded**
+ */
+export type IntegrationRuntimeLicenseType = string;
+
+/**
+ * Known values of {@link IntegrationRuntimeEntityReferenceType} that the service accepts.
+ */
+export const enum KnownIntegrationRuntimeEntityReferenceType {
+  IntegrationRuntimeReference = "IntegrationRuntimeReference",
+  LinkedServiceReference = "LinkedServiceReference"
+}
+
+/**
+ * Defines values for IntegrationRuntimeEntityReferenceType. \
+ * {@link KnownIntegrationRuntimeEntityReferenceType} can be used interchangeably with IntegrationRuntimeEntityReferenceType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **IntegrationRuntimeReference** \
+ * **LinkedServiceReference**
+ */
+export type IntegrationRuntimeEntityReferenceType = string;
+
+/**
+ * Known values of {@link IntegrationRuntimeEdition} that the service accepts.
+ */
+export const enum KnownIntegrationRuntimeEdition {
+  Standard = "Standard",
+  Enterprise = "Enterprise"
+}
+
+/**
+ * Defines values for IntegrationRuntimeEdition. \
+ * {@link KnownIntegrationRuntimeEdition} can be used interchangeably with IntegrationRuntimeEdition,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **Standard** \
+ * **Enterprise**
+ */
+export type IntegrationRuntimeEdition = string;
+
+/**
+ * Known values of {@link CopyBehaviorType} that the service accepts.
+ */
+export const enum KnownCopyBehaviorType {
+  PreserveHierarchy = "PreserveHierarchy",
+  FlattenHierarchy = "FlattenHierarchy",
+  MergeFiles = "MergeFiles"
+}
+
+/**
+ * Defines values for CopyBehaviorType. \
+ * {@link KnownCopyBehaviorType} can be used interchangeably with CopyBehaviorType,
+ * this enum contains the known values that the service supports.
+ * ### Known values supported by the service
+ * **PreserveHierarchy** \
+ * **FlattenHierarchy** \
+ * **MergeFiles**
+ */
+export type CopyBehaviorType = string;
+/**
+ * Defines values for ResourceIdentityType.
+ */
+export type ResourceIdentityType = "None" | "SystemAssigned";
+/**
+ * Defines values for DayOfWeek.
+ */
+export type DayOfWeek =
+  | "Sunday"
+  | "Monday"
+  | "Tuesday"
+  | "Wednesday"
+  | "Thursday"
+  | "Friday"
+  | "Saturday";
+
+/**
+ * Contains response data for the getLinkedServicesByWorkspace operation.
+ */
+export type LinkedServiceGetLinkedServicesByWorkspaceResponse = LinkedServiceListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: LinkedServiceListResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface LinkedServiceCreateOrUpdateLinkedServiceOptionalParams
+  extends coreHttp.OperationOptions {
+  /**
+   * ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.
+   */
+  ifMatch?: string;
+}
+
+/**
+ * Contains response data for the createOrUpdateLinkedService operation.
+ */
+export type LinkedServiceCreateOrUpdateLinkedServiceResponse = LinkedServiceResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: LinkedServiceResource;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface LinkedServiceGetLinkedServiceOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.
+   */
+  ifNoneMatch?: string;
+}
+
+/**
+ * Contains response data for the getLinkedService operation.
+ */
+export type LinkedServiceGetLinkedServiceResponse = LinkedServiceResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: LinkedServiceResource;
+  };
+};
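+
+// Editorial sketch (not part of the generated source): how the ETag options
+// above combine with the linked-service operations. The `client` shape is
+// assumed here (the ArtifactsClient operation groups are declared elsewhere),
+// which is why it is typed as `any`.
+async function exampleConditionalLinkedServiceOps(
+  client: any,
+  resource: LinkedServiceResource,
+  etag: string
+): Promise<void> {
+  // Fail the update unless the entity on the service still matches `etag`.
+  await client.linkedService.createOrUpdateLinkedService("myLinkedService", resource, {
+    ifMatch: etag
+  });
+  // Return no content when the copy identified by `etag` is still current.
+  await client.linkedService.getLinkedService("myLinkedService", { ifNoneMatch: etag });
+}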
+
+/**
+ * Contains response data for the getLinkedServicesByWorkspaceNext operation.
+ */
+export type LinkedServiceGetLinkedServicesByWorkspaceNextResponse = LinkedServiceListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: LinkedServiceListResponse;
+  };
+};
+
+/**
+ * Contains response data for the getDatasetsByWorkspace operation.
+ */
+export type DatasetGetDatasetsByWorkspaceResponse = DatasetListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: DatasetListResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface DatasetCreateOrUpdateDatasetOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.
+   */
+  ifMatch?: string;
+}
+
+/**
+ * Contains response data for the createOrUpdateDataset operation.
+ */
+export type DatasetCreateOrUpdateDatasetResponse = DatasetResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: DatasetResource;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface DatasetGetDatasetOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.
+   */
+  ifNoneMatch?: string;
+}
+
+/**
+ * Contains response data for the getDataset operation.
+ */
+export type DatasetGetDatasetResponse = DatasetResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: DatasetResource;
+  };
+};
+
+/**
+ * Contains response data for the getDatasetsByWorkspaceNext operation.
+ */
+export type DatasetGetDatasetsByWorkspaceNextResponse = DatasetListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: DatasetListResponse;
+  };
+};
+
+/**
+ * Contains response data for the getPipelinesByWorkspace operation.
+ */
+export type PipelineGetPipelinesByWorkspaceResponse = PipelineListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: PipelineListResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface PipelineCreateOrUpdatePipelineOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.
+   */
+  ifMatch?: string;
+}
+
+/**
+ * Contains response data for the createOrUpdatePipeline operation.
+ */
+export type PipelineCreateOrUpdatePipelineResponse = PipelineResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: PipelineResource;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface PipelineGetPipelineOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.
+   */
+  ifNoneMatch?: string;
+}
+
+/**
+ * Contains response data for the getPipeline operation.
+ */
+export type PipelineGetPipelineResponse = PipelineResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: PipelineResource;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface PipelineCreatePipelineRunOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * Parameters of the pipeline run. These parameters will be used only if the runId is not specified.
+   */
+  parameters?: { [propertyName: string]: any };
+  /**
+   * The pipeline run identifier. If run ID is specified, the parameters of the specified run will be used to create a new run.
+   */
+  referencePipelineRunId?: string;
+  /**
+   * Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId.
+   */
+  isRecovery?: boolean;
+  /**
+   * In recovery mode, the rerun will start from this activity. If not specified, all activities will run.
+   */
+  startActivityName?: string;
+}
+
+/**
+ * Contains response data for the createPipelineRun operation.
+ */
+export type PipelineCreatePipelineRunResponse = CreateRunResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: CreateRunResponse;
+  };
+};
+
+/**
+ * Contains response data for the getPipelinesByWorkspaceNext operation.
+ */
+export type PipelineGetPipelinesByWorkspaceNextResponse = PipelineListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: PipelineListResponse;
+  };
+};
+
+/**
+ * Contains response data for the queryPipelineRunsByWorkspace operation.
+ */
+export type PipelineRunQueryPipelineRunsByWorkspaceResponse = PipelineRunsQueryResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: PipelineRunsQueryResponse;
+  };
+};
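+
+// Editorial sketch (not part of the generated source): how the rerun options
+// in PipelineCreatePipelineRunOptionalParams above fit together. The `client`
+// shape and the activity name are assumptions for illustration.
+async function exampleRecoveryRun(client: any, failedRunId: string): Promise<void> {
+  // Rerun an earlier run starting at one activity; per the docs above, the
+  // referenced run and the new run are grouped under the same groupId.
+  const run: PipelineCreatePipelineRunResponse = await client.pipeline.createPipelineRun(
+    "myPipeline",
+    { referencePipelineRunId: failedRunId, isRecovery: true, startActivityName: "CopyStep2" }
+  );
+  console.log(run.runId);
+}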
+
+/**
+ * Contains response data for the getPipelineRun operation.
+ */
+export type PipelineRunGetPipelineRunResponse = PipelineRun & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: PipelineRun;
+  };
+};
+
+/**
+ * Contains response data for the queryActivityRuns operation.
+ */
+export type PipelineRunQueryActivityRunsResponse = ActivityRunsQueryResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: ActivityRunsQueryResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface PipelineRunCancelPipelineRunOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * If true, cancel all the child pipelines that are triggered by the current pipeline.
+   */
+  isRecursive?: boolean;
+}
+
+/**
+ * Contains response data for the getTriggersByWorkspace operation.
+ */
+export type TriggerGetTriggersByWorkspaceResponse = TriggerListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: TriggerListResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface TriggerCreateOrUpdateTriggerOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.
+   */
+  ifMatch?: string;
+}
+
+/**
+ * Contains response data for the createOrUpdateTrigger operation.
+ */
+export type TriggerCreateOrUpdateTriggerResponse = TriggerResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: TriggerResource;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface TriggerGetTriggerOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.
+   */
+  ifNoneMatch?: string;
+}
+
+/**
+ * Contains response data for the getTrigger operation.
+ */
+export type TriggerGetTriggerResponse = TriggerResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: TriggerResource;
+  };
+};
+
+/**
+ * Contains response data for the subscribeTriggerToEvents operation.
+ */
+export type TriggerSubscribeTriggerToEventsResponse = TriggerSubscriptionOperationStatus & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: TriggerSubscriptionOperationStatus;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Contains response data for the getEventSubscriptionStatus operation.
+ */
+export type TriggerGetEventSubscriptionStatusResponse = TriggerSubscriptionOperationStatus & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: TriggerSubscriptionOperationStatus;
+  };
+};
+
+/**
+ * Contains response data for the unsubscribeTriggerFromEvents operation.
+ */
+export type TriggerUnsubscribeTriggerFromEventsResponse = TriggerSubscriptionOperationStatus & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: TriggerSubscriptionOperationStatus;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Contains response data for the getTriggersByWorkspaceNext operation.
+ */
+export type TriggerGetTriggersByWorkspaceNextResponse = TriggerListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: TriggerListResponse;
+  };
+};
+
+/**
+ * Contains response data for the queryTriggerRunsByWorkspace operation.
+ */
+export type TriggerRunQueryTriggerRunsByWorkspaceResponse = TriggerRunsQueryResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: TriggerRunsQueryResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface DataFlowCreateOrUpdateDataFlowOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.
+   */
+  ifMatch?: string;
+}
+
+/**
+ * Contains response data for the createOrUpdateDataFlow operation.
+ */
+export type DataFlowCreateOrUpdateDataFlowResponse = DataFlowResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: DataFlowResource;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface DataFlowGetDataFlowOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the data flow entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.
+   */
+  ifNoneMatch?: string;
+}
+
+/**
+ * Contains response data for the getDataFlow operation.
+ */
+export type DataFlowGetDataFlowResponse = DataFlowResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: DataFlowResource;
+  };
+};
+
+/**
+ * Contains response data for the getDataFlowsByWorkspace operation.
+ */
+export type DataFlowGetDataFlowsByWorkspaceResponse = DataFlowListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: DataFlowListResponse;
+  };
+};
+
+/**
+ * Contains response data for the getDataFlowsByWorkspaceNext operation.
+ */
+export type DataFlowGetDataFlowsByWorkspaceNextResponse = DataFlowListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: DataFlowListResponse;
+  };
+};
+
+/**
+ * Contains response data for the createDataFlowDebugSession operation.
+ */
+export type DataFlowDebugSessionCreateDataFlowDebugSessionResponse = CreateDataFlowDebugSessionResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: CreateDataFlowDebugSessionResponse;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Contains response data for the queryDataFlowDebugSessionsByWorkspace operation.
+ */
+export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse = QueryDataFlowDebugSessionsResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: QueryDataFlowDebugSessionsResponse;
+  };
+};
+
+/**
+ * Contains response data for the addDataFlow operation.
+ */
+export type DataFlowDebugSessionAddDataFlowResponse = AddDataFlowToDebugSessionResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: AddDataFlowToDebugSessionResponse;
+  };
+};
+
+/**
+ * Contains response data for the executeCommand operation.
+ */
+export type DataFlowDebugSessionExecuteCommandResponse = DataFlowDebugCommandResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: DataFlowDebugCommandResponse;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Contains response data for the queryDataFlowDebugSessionsByWorkspaceNext operation.
+ */
+export type DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse = QueryDataFlowDebugSessionsResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: QueryDataFlowDebugSessionsResponse;
+  };
+};
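+
+// Editorial sketch (not part of the generated source): each *Response type
+// above is the deserialized model intersected with `_response`, so the raw
+// HTTP payload stays reachable next to the parsed result. The `client` shape
+// is assumed, which is why it is typed as `any`.
+async function exampleRawResponse(client: any): Promise<void> {
+  const result: DataFlowGetDataFlowResponse = await client.dataFlow.getDataFlow("myDataFlow");
+  console.log(result._response.status); // raw HTTP status code
+  console.log(result._response.bodyAsText); // unparsed body text
+  console.log(result._response.parsedBody); // the same DataFlowResource, as parsed JSON
+}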
+
+/**
+ * Contains response data for the getSqlScriptsByWorkspace operation.
+ */
+export type SqlScriptGetSqlScriptsByWorkspaceResponse = SqlScriptsListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SqlScriptsListResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface SqlScriptCreateOrUpdateSqlScriptOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the SQL script entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.
+   */
+  ifMatch?: string;
+}
+
+/**
+ * Contains response data for the createOrUpdateSqlScript operation.
+ */
+export type SqlScriptCreateOrUpdateSqlScriptResponse = SqlScriptResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SqlScriptResource;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface SqlScriptGetSqlScriptOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the SQL script entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.
+   */
+  ifNoneMatch?: string;
+}
+
+/**
+ * Contains response data for the getSqlScript operation.
+ */
+export type SqlScriptGetSqlScriptResponse = SqlScriptResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SqlScriptResource;
+  };
+};
+
+/**
+ * Contains response data for the getSqlScriptsByWorkspaceNext operation.
+ */
+export type SqlScriptGetSqlScriptsByWorkspaceNextResponse = SqlScriptsListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SqlScriptsListResponse;
+  };
+};
+
+/**
+ * Contains response data for the getSparkJobDefinitionsByWorkspace operation.
+ */
+export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse = SparkJobDefinitionsListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SparkJobDefinitionsListResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams
+  extends coreHttp.OperationOptions {
+  /**
+   * ETag of the Spark Job Definition entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.
+   */
+  ifMatch?: string;
+}
+
+/**
+ * Contains response data for the createOrUpdateSparkJobDefinition operation.
+ */
+export type SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse = SparkJobDefinitionResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SparkJobDefinitionResource;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface SparkJobDefinitionGetSparkJobDefinitionOptionalParams
+  extends coreHttp.OperationOptions {
+  /**
+   * ETag of the Spark Job Definition entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.
+   */
+  ifNoneMatch?: string;
+}
+
+/**
+ * Contains response data for the getSparkJobDefinition operation.
+ */
+export type SparkJobDefinitionGetSparkJobDefinitionResponse = SparkJobDefinitionResource & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SparkJobDefinitionResource;
+  };
+};
+
+/**
+ * Contains response data for the executeSparkJobDefinition operation.
+ */
+export type SparkJobDefinitionExecuteSparkJobDefinitionResponse = SparkBatchJob & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SparkBatchJob;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Contains response data for the debugSparkJobDefinition operation.
+ */
+export type SparkJobDefinitionDebugSparkJobDefinitionResponse = SparkBatchJob & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SparkBatchJob;
+    /**
+     * The parsed HTTP response headers.
+     */
+    [LROSYM]: LROResponseInfo;
+  };
+};
+
+/**
+ * Contains response data for the getSparkJobDefinitionsByWorkspaceNext operation.
+ */
+export type SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse = SparkJobDefinitionsListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: SparkJobDefinitionsListResponse;
+  };
+};
+
+/**
+ * Contains response data for the getNotebooksByWorkspace operation.
+ */
+export type NotebookGetNotebooksByWorkspaceResponse = NotebookListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: NotebookListResponse;
+  };
+};
+
+/**
+ * Contains response data for the getNotebookSummaryByWorkSpace operation.
+ */
+export type NotebookGetNotebookSummaryByWorkSpaceResponse = NotebookListResponse & {
+  /**
+   * The underlying HTTP response.
+   */
+  _response: coreHttp.HttpResponse & {
+    /**
+     * The response body as text (string format)
+     */
+    bodyAsText: string;
+
+    /**
+     * The response body as parsed JSON or XML
+     */
+    parsedBody: NotebookListResponse;
+  };
+};
+
+/**
+ * Optional parameters.
+ */
+export interface NotebookCreateOrUpdateNotebookOptionalParams extends coreHttp.OperationOptions {
+  /**
+   * ETag of the Notebook entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.
+   */
+  ifMatch?: string;
+}
+
+/**
+ * Contains response data for the createOrUpdateNotebook operation.
+ */ +export type NotebookCreateOrUpdateNotebookResponse = NotebookResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookResource; + /** + * The parsed HTTP response headers. + */ + [LROSYM]: LROResponseInfo; + }; +}; + +/** + * Optional parameters. + */ +export interface NotebookGetNotebookOptionalParams extends coreHttp.OperationOptions { + /** + * ETag of the Notebook entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned. + */ + ifNoneMatch?: string; +} + +/** + * Contains response data for the getNotebook operation. + */ +export type NotebookGetNotebookResponse = NotebookResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookResource; + }; +}; + +/** + * Contains response data for the getNotebooksByWorkspaceNext operation. + */ +export type NotebookGetNotebooksByWorkspaceNextResponse = NotebookListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookListResponse; + }; +}; + +/** + * Contains response data for the getNotebookSummaryByWorkSpaceNext operation. + */ +export type NotebookGetNotebookSummaryByWorkSpaceNextResponse = NotebookListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: NotebookListResponse; + }; +}; + +/** + * Contains response data for the get operation. + */ +export type WorkspaceGetResponse = Workspace & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: Workspace; + }; +}; + +/** + * Contains response data for the list operation. + */ +export type SqlPoolsListResponse = SqlPoolInfoListResult & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlPoolInfoListResult; + }; +}; + +/** + * Contains response data for the get operation. + */ +export type SqlPoolsGetResponse = SqlPool & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlPool; + }; +}; + +/** + * Contains response data for the list operation. + */ +export type BigDataPoolsListResponse = BigDataPoolResourceInfoListResult & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: BigDataPoolResourceInfoListResult; + }; +}; + +/** + * Contains response data for the get operation. + */ +export type BigDataPoolsGetResponse = BigDataPoolResourceInfo & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: BigDataPoolResourceInfo; + }; +}; + +/** + * Contains response data for the list operation. + */ +export type IntegrationRuntimesListResponse = IntegrationRuntimeListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: IntegrationRuntimeListResponse; + }; +}; + +/** + * Contains response data for the get operation. + */ +export type IntegrationRuntimesGetResponse = IntegrationRuntimeResource & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: IntegrationRuntimeResource; + }; +}; + +/** + * Optional parameters. + */ +export interface WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams + extends coreHttp.OperationOptions { + /** + * A client-provided GUID; useful for debugging and for better customer support + */ + clientRequestId?: string; +} + +/** + * Contains response data for the getGitHubAccessToken operation. + */ +export type WorkspaceGitRepoManagementGetGitHubAccessTokenResponse = GitHubAccessTokenResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: GitHubAccessTokenResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface ArtifactsClientOptionalParams extends coreHttp.ServiceClientOptions { + /** + * API version + */ + apiVersion?: string; + /** + * Overrides the client endpoint. + */ + endpoint?: string; +} diff --git a/sdk/synapse/synapse-artifacts/src/models/mappers.ts b/sdk/synapse/synapse-artifacts/src/models/mappers.ts new file mode 100644 index 000000000000..ca53b45a33f6 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/models/mappers.ts @@ -0,0 +1,21517 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
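// ---------------------------------------------------------------------------
// Editorial aside (illustrative only, not part of this PR): every response
// type generated in index.ts above follows one pattern — the model type
// intersected with a `_response` field exposing the raw HTTP payload — while
// the ifMatch/ifNoneMatch optional params gate conditional requests on ETags.
// A minimal consumption sketch, assuming the generated ArtifactsClient surface
// shown in this diff; the endpoint, script name, and ETag value below are
// hypothetical.
// ---------------------------------------------------------------------------
import { DefaultAzureCredential } from "@azure/identity";
import { ArtifactsClient } from "@azure/synapse-artifacts";

async function example(): Promise<void> {
  const client = new ArtifactsClient(
    new DefaultAzureCredential(),
    "https://myworkspace.dev.azuresynapse.net" // hypothetical workspace endpoint
  );

  const cachedEtag = '"0x8D8ABC000000000"'; // hypothetical ETag from an earlier response
  const script = await client.sqlScript.getSqlScript("my-script", {
    ifNoneMatch: cachedEtag // per the doc comment above: no content if unchanged
  });

  // The parsed model and the raw HTTP payload are both available on every response.
  console.log(script.name, script._response.status, script._response.bodyAsText);
}

example().catch(console.error);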
+import * as coreHttp from "@azure/core-http"; + +export const LinkedServiceListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const LinkedService: coreHttp.CompositeMapper = { + serializedName: "LinkedService", + type: { + name: "Composite", + className: "LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + connectVia: { + serializedName: "connectVia", + type: { + name: "Composite", + className: "IntegrationRuntimeReference" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "ParameterSpecification" } + } + } + }, + annotations: { + serializedName: "annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + } + } + } +}; + +export const IntegrationRuntimeReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const ParameterSpecification: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ParameterSpecification", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + defaultValue: { + serializedName: "defaultValue", + type: { + name: "any" + } + } + } + } +}; + +export const Resource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Resource", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + readOnly: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const CloudError: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CloudError", + modelProperties: { + code: { + serializedName: "error.code", + required: true, + type: { + name: "String" + } + }, + message: { + serializedName: "error.message", + required: true, + type: { + name: "String" + } + }, + target: { + serializedName: "error.target", + type: { + name: "String" + } + }, + details: { + serializedName: "error.details", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CloudError" + } + } + } + } + } + } +}; + +export const ArtifactRenameRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ArtifactRenameRequest", + modelProperties: { + newName: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + 
MaxLength: 260, + MinLength: 1 + }, + serializedName: "newName", + type: { + name: "String" + } + } + } + } +}; + +export const DatasetListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const Dataset: coreHttp.CompositeMapper = { + serializedName: "Dataset", + type: { + name: "Composite", + className: "Dataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + structure: { + serializedName: "structure", + type: { + name: "any" + } + }, + schema: { + serializedName: "schema", + type: { + name: "any" + } + }, + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "ParameterSpecification" } + } + } + }, + annotations: { + serializedName: "annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + folder: { + serializedName: "folder", + type: { + name: "Composite", + className: "DatasetFolder" + } + } + } + } +}; + +export const LinkedServiceReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const DatasetFolder: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetFolder", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const PipelineListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PipelineResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const Activity: coreHttp.CompositeMapper = { + serializedName: "Activity", + type: { + name: "Composite", + className: "Activity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + dependsOn: { + serializedName: "dependsOn", + type: { + name: 
"Sequence", + element: { + type: { + name: "Composite", + className: "ActivityDependency" + } + } + } + }, + userProperties: { + serializedName: "userProperties", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "UserProperty" + } + } + } + } + } + } +}; + +export const ActivityDependency: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ActivityDependency", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + activity: { + serializedName: "activity", + required: true, + type: { + name: "String" + } + }, + dependencyConditions: { + serializedName: "dependencyConditions", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const UserProperty: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "UserProperty", + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + value: { + serializedName: "value", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const VariableSpecification: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "VariableSpecification", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + defaultValue: { + serializedName: "defaultValue", + type: { + name: "any" + } + } + } + } +}; + +export const PipelineFolder: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineFolder", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const CreateRunResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CreateRunResponse", + modelProperties: { + runId: { + serializedName: "runId", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const RunFilterParameters: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RunFilterParameters", + modelProperties: { + continuationToken: { + serializedName: "continuationToken", + type: { + name: "String" + } + }, + lastUpdatedAfter: { + serializedName: "lastUpdatedAfter", + required: true, + type: { + name: "DateTime" + } + }, + lastUpdatedBefore: { + serializedName: "lastUpdatedBefore", + required: true, + type: { + name: "DateTime" + } + }, + filters: { + serializedName: "filters", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RunQueryFilter" + } + } + } + }, + orderBy: { + serializedName: "orderBy", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RunQueryOrderBy" + } + } + } + } + } + } +}; + +export const RunQueryFilter: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RunQueryFilter", + modelProperties: { + operand: { + serializedName: "operand", + required: true, + type: { + name: "String" + } + }, + operator: { + serializedName: "operator", + required: true, + type: { + name: "String" + } + }, + values: { + serializedName: "values", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const RunQueryOrderBy: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RunQueryOrderBy", + modelProperties: { + orderBy: { + serializedName: "orderBy", + required: true, + type: { + name: "String" + } + }, + order: { + serializedName: "order", + required: true, + 
type: { + name: "String" + } + } + } + } +}; + +export const PipelineRunsQueryResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineRunsQueryResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PipelineRun" + } + } + } + }, + continuationToken: { + serializedName: "continuationToken", + type: { + name: "String" + } + } + } + } +}; + +export const PipelineRun: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineRun", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + runId: { + serializedName: "runId", + readOnly: true, + type: { + name: "String" + } + }, + runGroupId: { + serializedName: "runGroupId", + readOnly: true, + type: { + name: "String" + } + }, + isLatest: { + serializedName: "isLatest", + readOnly: true, + type: { + name: "Boolean" + } + }, + pipelineName: { + serializedName: "pipelineName", + readOnly: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + invokedBy: { + serializedName: "invokedBy", + type: { + name: "Composite", + className: "PipelineRunInvokedBy" + } + }, + lastUpdated: { + serializedName: "lastUpdated", + readOnly: true, + type: { + name: "DateTime" + } + }, + runStart: { + serializedName: "runStart", + readOnly: true, + type: { + name: "DateTime" + } + }, + runEnd: { + serializedName: "runEnd", + readOnly: true, + type: { + name: "DateTime" + } + }, + durationInMs: { + serializedName: "durationInMs", + readOnly: true, + type: { + name: "Number" + } + }, + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + }, + message: { + serializedName: "message", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const PipelineRunInvokedBy: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineRunInvokedBy", + modelProperties: { + name: { + serializedName: "name", + readOnly: true, + type: { + name: "String" + } + }, + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + }, + invokedByType: { + serializedName: "invokedByType", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const ActivityRunsQueryResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ActivityRunsQueryResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ActivityRun" + } + } + } + }, + continuationToken: { + serializedName: "continuationToken", + type: { + name: "String" + } + } + } + } +}; + +export const ActivityRun: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ActivityRun", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + pipelineName: { + serializedName: "pipelineName", + readOnly: true, + type: { + name: "String" + } + }, + pipelineRunId: { + serializedName: "pipelineRunId", + readOnly: true, + type: { + name: "String" + } + }, + activityName: { + serializedName: "activityName", + readOnly: true, + type: { + name: "String" + } + }, + activityType: { + serializedName: "activityType", + readOnly: true, + type: { + name: "String" + } + }, + activityRunId: { + serializedName: "activityRunId", + readOnly: true, + 
type: { + name: "String" + } + }, + linkedServiceName: { + serializedName: "linkedServiceName", + readOnly: true, + type: { + name: "String" + } + }, + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + }, + activityRunStart: { + serializedName: "activityRunStart", + readOnly: true, + type: { + name: "DateTime" + } + }, + activityRunEnd: { + serializedName: "activityRunEnd", + readOnly: true, + type: { + name: "DateTime" + } + }, + durationInMs: { + serializedName: "durationInMs", + readOnly: true, + type: { + name: "Number" + } + }, + input: { + serializedName: "input", + readOnly: true, + type: { + name: "any" + } + }, + output: { + serializedName: "output", + readOnly: true, + type: { + name: "any" + } + }, + error: { + serializedName: "error", + readOnly: true, + type: { + name: "any" + } + } + } + } +}; + +export const TriggerListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "TriggerResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const Trigger: coreHttp.CompositeMapper = { + serializedName: "Trigger", + type: { + name: "Composite", + className: "Trigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + runtimeState: { + serializedName: "runtimeState", + readOnly: true, + type: { + name: "String" + } + }, + annotations: { + serializedName: "annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + } + } + } +}; + +export const TriggerSubscriptionOperationStatus: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerSubscriptionOperationStatus", + modelProperties: { + triggerName: { + serializedName: "triggerName", + readOnly: true, + type: { + name: "String" + } + }, + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const TriggerRunsQueryResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerRunsQueryResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "TriggerRun" + } + } + } + }, + continuationToken: { + serializedName: "continuationToken", + type: { + name: "String" + } + } + } + } +}; + +export const TriggerRun: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerRun", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + triggerRunId: { + serializedName: "triggerRunId", + readOnly: true, + type: { + name: "String" + } + }, + triggerName: { + serializedName: "triggerName", + readOnly: true, + type: { + name: "String" + } + }, + triggerType: { + serializedName: "triggerType", + readOnly: true, + type: { + name: "String" + } + }, + triggerRunTimestamp: { + serializedName: "triggerRunTimestamp", + readOnly: true, + type: { + name: "DateTime" + } + }, + status: { + serializedName: "status", + readOnly: true, + 
type: { + name: "String" + } + }, + message: { + serializedName: "message", + readOnly: true, + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + triggeredPipelines: { + serializedName: "triggeredPipelines", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; + +export const DataFlow: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlow", + uberParent: "DataFlow", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + annotations: { + serializedName: "annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + folder: { + serializedName: "folder", + type: { + name: "Composite", + className: "DataFlowFolder" + } + } + } + } +}; + +export const DataFlowFolder: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowFolder", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const CreateDataFlowDebugSessionRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CreateDataFlowDebugSessionRequest", + modelProperties: { + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + existingClusterId: { + serializedName: "existingClusterId", + type: { + name: "String" + } + }, + clusterTimeout: { + serializedName: "clusterTimeout", + type: { + name: "Number" + } + }, + newClusterName: { + serializedName: "newClusterName", + type: { + name: "String" + } + }, + newClusterNodeType: { + serializedName: "newClusterNodeType", + type: { + name: "String" + } + }, + dataBricksLinkedService: { + serializedName: "dataBricksLinkedService", + type: { + name: "Composite", + className: "LinkedServiceResource" + } + } + } + } +}; + +export const CreateDataFlowDebugSessionResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CreateDataFlowDebugSessionResponse", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + } + } + } +}; + +export const QueryDataFlowDebugSessionsResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "QueryDataFlowDebugSessionsResponse", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowDebugSessionInfo" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugSessionInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugSessionInfo", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + dataFlowName: { + 
serializedName: "dataFlowName", + type: { + name: "String" + } + }, + computeType: { + serializedName: "computeType", + type: { + name: "String" + } + }, + coreCount: { + serializedName: "coreCount", + type: { + name: "Number" + } + }, + nodeCount: { + serializedName: "nodeCount", + type: { + name: "Number" + } + }, + integrationRuntimeName: { + serializedName: "integrationRuntimeName", + type: { + name: "String" + } + }, + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + startTime: { + serializedName: "startTime", + type: { + name: "String" + } + }, + timeToLiveInMinutes: { + serializedName: "timeToLiveInMinutes", + type: { + name: "Number" + } + }, + lastActivityTime: { + serializedName: "lastActivityTime", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugPackage: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugPackage", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlow: { + serializedName: "dataFlow", + type: { + name: "Composite", + className: "DataFlowDebugResource" + } + }, + datasets: { + serializedName: "datasets", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetDebugResource" + } + } + } + }, + linkedServices: { + serializedName: "linkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceDebugResource" + } + } + } + }, + staging: { + serializedName: "staging", + type: { + name: "Composite", + className: "DataFlowStagingInfo" + } + }, + debugSettings: { + serializedName: "debugSettings", + type: { + name: "Composite", + className: "DataFlowDebugPackageDebugSettings" + } + } + } + } +}; + +export const SubResourceDebugResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SubResourceDebugResource", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowStagingInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowStagingInfo", + modelProperties: { + linkedService: { + serializedName: "linkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + folderPath: { + serializedName: "folderPath", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugPackageDebugSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugPackageDebugSettings", + modelProperties: { + sourceSettings: { + serializedName: "sourceSettings", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowSourceSetting" + } + } + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + datasetParameters: { + serializedName: "datasetParameters", + type: { + name: "any" + } + } + } + } +}; + +export const DataFlowSourceSetting: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowSourceSetting", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + sourceName: { + serializedName: "sourceName", + type: { + name: "String" + } + }, + rowLimit: { + serializedName: "rowLimit", + type: { + name: "Number" + } + } + } + } +}; + +export const AddDataFlowToDebugSessionResponse: coreHttp.CompositeMapper = { + type: { + name: 
"Composite", + className: "AddDataFlowToDebugSessionResponse", + modelProperties: { + jobVersion: { + serializedName: "jobVersion", + type: { + name: "String" + } + } + } + } +}; + +export const DeleteDataFlowDebugSessionRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DeleteDataFlowDebugSessionRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugCommandRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugCommandRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + required: true, + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + commandName: { + serializedName: "commandName", + type: { + name: "String" + } + }, + commandPayload: { + serializedName: "commandPayload", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const DataFlowDebugCommandResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugCommandResponse", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + data: { + serializedName: "data", + type: { + name: "String" + } + } + } + } +}; + +export const SqlScriptsListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScriptsListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SqlScriptResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const SqlScriptResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScriptResource", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + readOnly: true, + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "SqlScript" + } + } + } + } +}; + +export const SqlScript: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScript", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + description: { + serializedName: "description", + type: { + name: "String" + } + }, + type: { + serializedName: "type", + type: { + name: "String" + } + }, + content: { + serializedName: "content", + type: { + name: "Composite", + className: "SqlScriptContent" + } + } + } + } +}; + +export const SqlScriptContent: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScriptContent", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + query: { + serializedName: "query", + required: true, + type: { + name: "String" + } + }, + currentConnection: { + serializedName: "currentConnection", + type: { + name: "Composite", + className: "SqlConnection" + } + }, + metadata: { + serializedName: "metadata", + type: { + name: "Composite", + className: "SqlScriptMetadata" + } + } + } + } +}; + +export const SqlConnection: 
coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlConnection", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SqlScriptMetadata: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlScriptMetadata", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + language: { + serializedName: "language", + type: { + name: "String" + } + } + } + } +}; + +export const SparkJobDefinitionsListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobDefinitionsListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkJobDefinitionResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const SparkJobDefinition: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobDefinition", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + description: { + serializedName: "description", + type: { + name: "String" + } + }, + targetBigDataPool: { + serializedName: "targetBigDataPool", + type: { + name: "Composite", + className: "BigDataPoolReference" + } + }, + requiredSparkVersion: { + serializedName: "requiredSparkVersion", + type: { + name: "String" + } + }, + language: { + serializedName: "language", + type: { + name: "String" + } + }, + jobProperties: { + serializedName: "jobProperties", + type: { + name: "Composite", + className: "SparkJobProperties" + } + } + } + } +}; + +export const BigDataPoolReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "BigDataPoolReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SparkJobProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, + file: { + serializedName: "file", + required: true, + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + conf: { + serializedName: "conf", + type: { + name: "any" + } + }, + args: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + driverMemory: { + serializedName: "driverMemory", + required: true, + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + required: true, + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + required: true, + type: { + name: 
"String" + } + }, + executorCores: { + serializedName: "executorCores", + required: true, + type: { + name: "Number" + } + }, + numExecutors: { + serializedName: "numExecutors", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const SparkBatchJob: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJob", + modelProperties: { + livyInfo: { + serializedName: "livyInfo", + type: { + name: "Composite", + className: "SparkBatchJobState" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + workspaceName: { + serializedName: "workspaceName", + type: { + name: "String" + } + }, + sparkPoolName: { + serializedName: "sparkPoolName", + type: { + name: "String" + } + }, + submitterName: { + serializedName: "submitterName", + type: { + name: "String" + } + }, + submitterId: { + serializedName: "submitterId", + type: { + name: "String" + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + jobType: { + serializedName: "jobType", + type: { + name: "String" + } + }, + result: { + serializedName: "result", + type: { + name: "String" + } + }, + scheduler: { + serializedName: "schedulerInfo", + type: { + name: "Composite", + className: "SparkScheduler" + } + }, + plugin: { + serializedName: "pluginInfo", + type: { + name: "Composite", + className: "SparkServicePlugin" + } + }, + errors: { + serializedName: "errorInfo", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkServiceError" + } + } + } + }, + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + id: { + serializedName: "id", + required: true, + type: { + name: "Number" + } + }, + appId: { + serializedName: "appId", + nullable: true, + type: { + name: "String" + } + }, + appInfo: { + serializedName: "appInfo", + nullable: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + state: { + serializedName: "state", + type: { + name: "String" + } + }, + logLines: { + serializedName: "log", + nullable: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const SparkBatchJobState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJobState", + modelProperties: { + notStartedAt: { + serializedName: "notStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + startingAt: { + serializedName: "startingAt", + nullable: true, + type: { + name: "DateTime" + } + }, + runningAt: { + serializedName: "runningAt", + nullable: true, + type: { + name: "DateTime" + } + }, + deadAt: { + serializedName: "deadAt", + nullable: true, + type: { + name: "DateTime" + } + }, + successAt: { + serializedName: "successAt", + nullable: true, + type: { + name: "DateTime" + } + }, + terminatedAt: { + serializedName: "killedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + recoveringAt: { + serializedName: "recoveringAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + }, + jobCreationRequest: { + serializedName: "jobCreationRequest", + type: { + name: "Composite", + className: "SparkRequest" + } + } + } + } +}; + +export const SparkRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkRequest", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, 
+ file: { + serializedName: "file", + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + arguments: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + pythonFiles: { + serializedName: "pyFiles", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + configuration: { + serializedName: "conf", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + driverMemory: { + serializedName: "driverMemory", + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + type: { + name: "Number" + } + }, + executorCount: { + serializedName: "numExecutors", + type: { + name: "Number" + } + } + } + } +}; + +export const SparkScheduler: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkScheduler", + modelProperties: { + submittedAt: { + serializedName: "submittedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + scheduledAt: { + serializedName: "scheduledAt", + nullable: true, + type: { + name: "DateTime" + } + }, + endedAt: { + serializedName: "endedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + cancellationRequestedAt: { + serializedName: "cancellationRequestedAt", + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + } + } + } +}; + +export const SparkServicePlugin: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkServicePlugin", + modelProperties: { + preparationStartedAt: { + serializedName: "preparationStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + resourceAcquisitionStartedAt: { + serializedName: "resourceAcquisitionStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + submissionStartedAt: { + serializedName: "submissionStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + monitoringStartedAt: { + serializedName: "monitoringStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + cleanupStartedAt: { + serializedName: "cleanupStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + } + } + } +}; + +export const SparkServiceError: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkServiceError", + modelProperties: { + message: { + serializedName: "message", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "errorCode", + type: { + name: "String" + } + }, + source: { + serializedName: "source", + type: { + name: "String" + } + } + } + } +}; + +export const NotebookListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + 
element: { + type: { + name: "Composite", + className: "NotebookResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const NotebookResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookResource", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + readOnly: true, + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "Notebook" + } + } + } + } +}; + +export const Notebook: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Notebook", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + description: { + serializedName: "description", + type: { + name: "String" + } + }, + bigDataPool: { + serializedName: "bigDataPool", + type: { + name: "Composite", + className: "BigDataPoolReference" + } + }, + sessionProperties: { + serializedName: "sessionProperties", + type: { + name: "Composite", + className: "NotebookSessionProperties" + } + }, + metadata: { + serializedName: "metadata", + type: { + name: "Composite", + className: "NotebookMetadata" + } + }, + nbformat: { + serializedName: "nbformat", + required: true, + type: { + name: "Number" + } + }, + nbformatMinor: { + serializedName: "nbformat_minor", + required: true, + type: { + name: "Number" + } + }, + cells: { + serializedName: "cells", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "NotebookCell" + } + } + } + } + } + } +}; + +export const NotebookSessionProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookSessionProperties", + modelProperties: { + driverMemory: { + serializedName: "driverMemory", + required: true, + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + required: true, + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + required: true, + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + required: true, + type: { + name: "Number" + } + }, + numExecutors: { + serializedName: "numExecutors", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const NotebookMetadata: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookMetadata", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + kernelspec: { + serializedName: "kernelspec", + type: { + name: "Composite", + className: "NotebookKernelSpec" + } + }, + languageInfo: { + serializedName: "language_info", + type: { + name: "Composite", + className: "NotebookLanguageInfo" + } + } + } + } +}; + +export const NotebookKernelSpec: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookKernelSpec", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + displayName: { + serializedName: "display_name", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const NotebookLanguageInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + 
className: "NotebookLanguageInfo", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + codemirrorMode: { + serializedName: "codemirror_mode", + type: { + name: "String" + } + } + } + } +}; + +export const NotebookCell: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookCell", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + cellType: { + serializedName: "cell_type", + required: true, + type: { + name: "String" + } + }, + metadata: { + serializedName: "metadata", + required: true, + type: { + name: "any" + } + }, + source: { + serializedName: "source", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + attachments: { + serializedName: "attachments", + type: { + name: "any" + } + }, + outputs: { + serializedName: "outputs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "NotebookCellOutputItem" + } + } + } + } + } + } +}; + +export const NotebookCellOutputItem: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "NotebookCellOutputItem", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, + executionCount: { + serializedName: "execution_count", + type: { + name: "Number" + } + }, + outputType: { + serializedName: "output_type", + required: true, + type: { + name: "String" + } + }, + text: { + serializedName: "text", + type: { + name: "any" + } + }, + data: { + serializedName: "data", + type: { + name: "any" + } + }, + metadata: { + serializedName: "metadata", + type: { + name: "any" + } + } + } + } +}; + +export const DataLakeStorageAccountDetails: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataLakeStorageAccountDetails", + modelProperties: { + accountUrl: { + serializedName: "accountUrl", + type: { + name: "String" + } + }, + filesystem: { + serializedName: "filesystem", + type: { + name: "String" + } + } + } + } +}; + +export const VirtualNetworkProfile: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "VirtualNetworkProfile", + modelProperties: { + computeSubnetId: { + serializedName: "computeSubnetId", + type: { + name: "String" + } + } + } + } +}; + +export const PrivateEndpoint: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PrivateEndpoint", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const PrivateLinkServiceConnectionState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PrivateLinkServiceConnectionState", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + actionsRequired: { + serializedName: "actionsRequired", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const EncryptionDetails: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "EncryptionDetails", + modelProperties: { + doubleEncryptionEnabled: { + serializedName: "doubleEncryptionEnabled", + readOnly: true, + type: { + name: "Boolean" + } + }, + cmk: { + serializedName: "cmk", + type: { + name: "Composite", + className: "CustomerManagedKeyDetails" + } + } + } + } +}; + +export const CustomerManagedKeyDetails: coreHttp.CompositeMapper 
= { + type: { + name: "Composite", + className: "CustomerManagedKeyDetails", + modelProperties: { + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + }, + key: { + serializedName: "key", + type: { + name: "Composite", + className: "WorkspaceKeyDetails" + } + } + } + } +}; + +export const WorkspaceKeyDetails: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WorkspaceKeyDetails", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, + keyVaultUrl: { + serializedName: "keyVaultUrl", + type: { + name: "String" + } + } + } + } +}; + +export const ManagedVirtualNetworkSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedVirtualNetworkSettings", + modelProperties: { + preventDataExfiltration: { + serializedName: "preventDataExfiltration", + type: { + name: "Boolean" + } + }, + linkedAccessCheckOnTargetResource: { + serializedName: "linkedAccessCheckOnTargetResource", + type: { + name: "Boolean" + } + }, + allowedAadTenantIdsForLinking: { + serializedName: "allowedAadTenantIdsForLinking", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const WorkspaceRepositoryConfiguration: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WorkspaceRepositoryConfiguration", + modelProperties: { + type: { + serializedName: "type", + type: { + name: "String" + } + }, + hostName: { + serializedName: "hostName", + type: { + name: "String" + } + }, + accountName: { + serializedName: "accountName", + type: { + name: "String" + } + }, + projectName: { + serializedName: "projectName", + type: { + name: "String" + } + }, + repositoryName: { + serializedName: "repositoryName", + type: { + name: "String" + } + }, + collaborationBranch: { + serializedName: "collaborationBranch", + type: { + name: "String" + } + }, + rootFolder: { + serializedName: "rootFolder", + type: { + name: "String" + } + } + } + } +}; + +export const PurviewConfiguration: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PurviewConfiguration", + modelProperties: { + purviewResourceId: { + serializedName: "purviewResourceId", + type: { + name: "String" + } + } + } + } +}; + +export const ManagedIdentity: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedIdentity", + modelProperties: { + principalId: { + serializedName: "principalId", + readOnly: true, + type: { + name: "String" + } + }, + tenantId: { + serializedName: "tenantId", + readOnly: true, + type: { + name: "Uuid" + } + }, + type: { + serializedName: "type", + type: { + name: "Enum", + allowedValues: ["None", "SystemAssigned"] + } + } + } + } +}; + +export const ErrorContract: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorContract", + modelProperties: { + error: { + serializedName: "error", + type: { + name: "Composite", + className: "ErrorResponse" + } + } + } + } +}; + +export const ErrorResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorResponse", + modelProperties: { + code: { + serializedName: "code", + readOnly: true, + type: { + name: "String" + } + }, + message: { + serializedName: "message", + readOnly: true, + type: { + name: "String" + } + }, + target: { + serializedName: "target", + readOnly: true, + type: { + name: "String" + } + }, + details: { + serializedName: "details", + readOnly: true, + type: { + name: "Sequence", + element: { + 
type: { + name: "Composite", + className: "ErrorResponse" + } + } + } + }, + additionalInfo: { + serializedName: "additionalInfo", + readOnly: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ErrorAdditionalInfo" + } + } + } + } + } + } +}; + +export const ErrorAdditionalInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ErrorAdditionalInfo", + modelProperties: { + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + }, + info: { + serializedName: "info", + readOnly: true, + type: { + name: "any" + } + } + } + } +}; + +export const SqlPoolInfoListResult: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlPoolInfoListResult", + modelProperties: { + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + }, + value: { + serializedName: "value", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SqlPool" + } + } + } + } + } + } +}; + +export const Sku: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Sku", + modelProperties: { + tier: { + serializedName: "tier", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + capacity: { + serializedName: "capacity", + type: { + name: "Number" + } + } + } + } +}; + +export const BigDataPoolResourceInfoListResult: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "BigDataPoolResourceInfoListResult", + modelProperties: { + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + }, + value: { + serializedName: "value", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BigDataPoolResourceInfo" + } + } + } + } + } + } +}; + +export const AutoScaleProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AutoScaleProperties", + modelProperties: { + minNodeCount: { + serializedName: "minNodeCount", + type: { + name: "Number" + } + }, + enabled: { + serializedName: "enabled", + type: { + name: "Boolean" + } + }, + maxNodeCount: { + serializedName: "maxNodeCount", + type: { + name: "Number" + } + } + } + } +}; + +export const AutoPauseProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AutoPauseProperties", + modelProperties: { + delayInMinutes: { + serializedName: "delayInMinutes", + type: { + name: "Number" + } + }, + enabled: { + serializedName: "enabled", + type: { + name: "Boolean" + } + } + } + } +}; + +export const LibraryRequirements: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LibraryRequirements", + modelProperties: { + time: { + serializedName: "time", + readOnly: true, + type: { + name: "DateTime" + } + }, + content: { + serializedName: "content", + type: { + name: "String" + } + }, + filename: { + serializedName: "filename", + type: { + name: "String" + } + } + } + } +}; + +export const IntegrationRuntimeListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "IntegrationRuntimeResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + type: { + name: "String" + } + } + } + } +}; + +export const IntegrationRuntime: coreHttp.CompositeMapper = { + serializedName: 
"IntegrationRuntime", + type: { + name: "Composite", + className: "IntegrationRuntime", + uberParent: "IntegrationRuntime", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + } + } + } +}; + +export const GitHubAccessTokenRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "GitHubAccessTokenRequest", + modelProperties: { + gitHubClientId: { + serializedName: "gitHubClientId", + required: true, + type: { + name: "String" + } + }, + gitHubAccessCode: { + serializedName: "gitHubAccessCode", + required: true, + type: { + name: "String" + } + }, + gitHubAccessTokenBaseUrl: { + serializedName: "gitHubAccessTokenBaseUrl", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const GitHubAccessTokenResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "GitHubAccessTokenResponse", + modelProperties: { + gitHubAccessToken: { + serializedName: "gitHubAccessToken", + type: { + name: "String" + } + } + } + } +}; + +export const Expression: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Expression", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + value: { + serializedName: "value", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SecretBase: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SecretBase", + uberParent: "SecretBase", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const StartDataFlowDebugSessionRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "StartDataFlowDebugSessionRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlow: { + serializedName: "dataFlow", + type: { + name: "Composite", + className: "DataFlowResource" + } + }, + datasets: { + serializedName: "datasets", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetResource" + } + } + } + }, + linkedServices: { + serializedName: "linkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceResource" + } + } + } + }, + staging: { + serializedName: "staging", + type: { + name: "any" + } + }, + debugSettings: { + serializedName: "debugSettings", + type: { + name: "any" + } + }, + incrementalDebug: { + serializedName: "incrementalDebug", + type: { + name: "Boolean" + } + } + } + } +}; + +export const StartDataFlowDebugSessionResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "StartDataFlowDebugSessionResponse", + modelProperties: { + jobVersion: { + serializedName: "jobVersion", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugPreviewDataRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugPreviewDataRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: 
"String" + } + }, + streamName: { + serializedName: "streamName", + type: { + name: "String" + } + }, + rowLimits: { + serializedName: "rowLimits", + type: { + name: "Number" + } + } + } + } +}; + +export const DataFlowDebugStatisticsRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugStatisticsRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + streamName: { + serializedName: "streamName", + type: { + name: "String" + } + }, + columns: { + serializedName: "columns", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const EvaluateDataFlowExpressionRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "EvaluateDataFlowExpressionRequest", + modelProperties: { + sessionId: { + serializedName: "sessionId", + type: { + name: "String" + } + }, + dataFlowName: { + serializedName: "dataFlowName", + type: { + name: "String" + } + }, + streamName: { + serializedName: "streamName", + type: { + name: "String" + } + }, + rowLimits: { + serializedName: "rowLimits", + type: { + name: "Number" + } + }, + expression: { + serializedName: "expression", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugQueryResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugQueryResponse", + modelProperties: { + runId: { + serializedName: "runId", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugResultResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugResultResponse", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + data: { + serializedName: "data", + type: { + name: "String" + } + } + } + } +}; + +export const TriggerDependencyProvisioningStatus: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerDependencyProvisioningStatus", + modelProperties: { + triggerName: { + serializedName: "triggerName", + required: true, + type: { + name: "String" + } + }, + provisioningStatus: { + serializedName: "provisioningStatus", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const PipelineReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + } + } + } +}; + +export const TriggerPipelineReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerPipelineReference", + modelProperties: { + pipelineReference: { + serializedName: "pipelineReference", + type: { + name: "Composite", + className: "PipelineReference" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const WorkspaceUpdateParameters: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WorkspaceUpdateParameters", + modelProperties: { + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + identity: { + 
serializedName: "identity", + type: { + name: "Composite", + className: "WorkspaceIdentity" + } + } + } + } +}; + +export const WorkspaceIdentity: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WorkspaceIdentity", + modelProperties: { + type: { + defaultValue: "SystemAssigned", + isConstant: true, + serializedName: "type", + type: { + name: "String" + } + }, + principalId: { + serializedName: "principalId", + readOnly: true, + type: { + name: "String" + } + }, + tenantId: { + serializedName: "tenantId", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const DatasetReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const DataFlowReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowReference", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + }, + datasetParameters: { + serializedName: "datasetParameters", + type: { + name: "any" + } + } + } + } +}; + +export const RerunTumblingWindowTriggerActionParameters: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RerunTumblingWindowTriggerActionParameters", + modelProperties: { + startTime: { + serializedName: "startTime", + required: true, + type: { + name: "DateTime" + } + }, + endTime: { + serializedName: "endTime", + required: true, + type: { + name: "DateTime" + } + }, + maxConcurrency: { + constraints: { + InclusiveMaximum: 50, + InclusiveMinimum: 1 + }, + serializedName: "maxConcurrency", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const RerunTriggerListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RerunTriggerListResponse", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RerunTriggerResource" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const GetSsisObjectMetadataRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "GetSsisObjectMetadataRequest", + modelProperties: { + metadataPath: { + serializedName: "metadataPath", + type: { + name: "String" + } + } + } + } +}; + +export const SsisObjectMetadataStatusResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisObjectMetadataStatusResponse", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + type: { + name: "String" + } + }, + error: { + serializedName: "error", + type: { + name: "String" + } + } + } + } +}; + +export const ExposureControlRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ExposureControlRequest", + 
modelProperties: { + featureName: { + serializedName: "featureName", + type: { + name: "String" + } + }, + featureType: { + serializedName: "featureType", + type: { + name: "String" + } + } + } + } +}; + +export const ExposureControlResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ExposureControlResponse", + modelProperties: { + featureName: { + serializedName: "featureName", + readOnly: true, + type: { + name: "String" + } + }, + value: { + serializedName: "value", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const SynapseNotebookReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SynapseNotebookReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SynapseSparkJobReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SynapseSparkJobReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SqlPoolReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlPoolReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const Transformation: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Transformation", + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + } + } + } +}; + +export const DatasetLocation: coreHttp.CompositeMapper = { + serializedName: "DatasetLocation", + type: { + name: "Composite", + className: "DatasetLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + folderPath: { + serializedName: "folderPath", + type: { + name: "any" + } + }, + fileName: { + serializedName: "fileName", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetDataElement: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetDataElement", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "any" + } + }, + type: { + serializedName: "type", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetSchemaDataElement: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetSchemaDataElement", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + name: { + serializedName: "name", + type: { + name: "any" + } + }, + type: { + serializedName: "type", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetStorageFormat: coreHttp.CompositeMapper = { + serializedName: "DatasetStorageFormat", + type: { + name: "Composite", + className: "DatasetStorageFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + serializer: { + serializedName: "serializer", + type: { + name: "any" + } + }, + deserializer: { + serializedName: "deserializer", + type: { + name: "any" + } + } + } + } +}; + +export const DatasetCompression: coreHttp.CompositeMapper = { + serializedName: "DatasetCompression", + type: { + name: "Composite", + className: "DatasetCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const WebLinkedServiceTypeProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WebLinkedServiceTypeProperties", + uberParent: "WebLinkedServiceTypeProperties", + polymorphicDiscriminator: { + serializedName: "authenticationType", + clientName: "authenticationType" + }, + modelProperties: { + url: { + serializedName: "url", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "authenticationType", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const ScriptAction: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ScriptAction", + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + uri: { + serializedName: "uri", + required: true, + type: { + name: "String" + } + }, + roles: { + serializedName: "roles", + required: true, + type: { + name: "String" + } + }, + parameters: { + serializedName: "parameters", + type: { + name: "String" + } + } + } + } +}; + +export const ActivityPolicy: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ActivityPolicy", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + timeout: { + serializedName: "timeout", + type: { + name: "any" + } + }, + retry: { + serializedName: "retry", + type: { + name: "any" + } + }, + retryIntervalInSeconds: { + constraints: { + InclusiveMaximum: 86400, + InclusiveMinimum: 30 + }, + serializedName: "retryIntervalInSeconds", + type: { + name: "Number" + } + }, + secureInput: { + serializedName: "secureInput", + type: { + name: "Boolean" + } + }, + secureOutput: { + serializedName: "secureOutput", + type: { + name: "Boolean" + } + } + } + } +}; + +export const StoreReadSettings: coreHttp.CompositeMapper = { + serializedName: "StoreReadSettings", + type: { + name: "Composite", + className: "StoreReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + maxConcurrentConnections: { + serializedName: "maxConcurrentConnections", + type: { + name: "any" + } + } + } + } +}; + +export const StoreWriteSettings: coreHttp.CompositeMapper = { + serializedName: "StoreWriteSettings", + type: { + name: "Composite", + className: "StoreWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + 
serializedName: "type", + required: true, + type: { + name: "String" + } + }, + maxConcurrentConnections: { + serializedName: "maxConcurrentConnections", + type: { + name: "any" + } + }, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const DistcpSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DistcpSettings", + modelProperties: { + resourceManagerEndpoint: { + serializedName: "resourceManagerEndpoint", + required: true, + type: { + name: "any" + } + }, + tempScriptPath: { + serializedName: "tempScriptPath", + required: true, + type: { + name: "any" + } + }, + distcpOptions: { + serializedName: "distcpOptions", + type: { + name: "any" + } + } + } + } +}; + +export const FormatReadSettings: coreHttp.CompositeMapper = { + serializedName: "FormatReadSettings", + type: { + name: "Composite", + className: "FormatReadSettings", + uberParent: "FormatReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const FormatWriteSettings: coreHttp.CompositeMapper = { + serializedName: "FormatWriteSettings", + type: { + name: "Composite", + className: "FormatWriteSettings", + uberParent: "FormatWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const CopySource: coreHttp.CompositeMapper = { + serializedName: "CopySource", + type: { + name: "Composite", + className: "CopySource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + sourceRetryCount: { + serializedName: "sourceRetryCount", + type: { + name: "any" + } + }, + sourceRetryWait: { + serializedName: "sourceRetryWait", + type: { + name: "any" + } + }, + maxConcurrentConnections: { + serializedName: "maxConcurrentConnections", + type: { + name: "any" + } + } + } + } +}; + +export const CopySink: coreHttp.CompositeMapper = { + serializedName: "CopySink", + type: { + name: "Composite", + className: "CopySink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + writeBatchSize: { + serializedName: "writeBatchSize", + type: { + name: "any" + } + }, + writeBatchTimeout: { + serializedName: "writeBatchTimeout", + type: { + name: "any" + } + }, + sinkRetryCount: { + serializedName: "sinkRetryCount", + type: { + name: "any" + } + }, + sinkRetryWait: { + serializedName: "sinkRetryWait", + type: { + name: "any" + } + }, + maxConcurrentConnections: { + serializedName: "maxConcurrentConnections", + type: { + name: "any" + } + } + } + } +}; + +export const StagingSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "StagingSettings", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + linkedServiceName: { + serializedName: 
"linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + path: { + serializedName: "path", + type: { + name: "any" + } + }, + enableCompression: { + serializedName: "enableCompression", + type: { + name: "any" + } + } + } + } +}; + +export const RedirectIncompatibleRowSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RedirectIncompatibleRowSettings", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + linkedServiceName: { + serializedName: "linkedServiceName", + required: true, + type: { + name: "any" + } + }, + path: { + serializedName: "path", + type: { + name: "any" + } + } + } + } +}; + +export const SapHanaPartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SapHanaPartitionSettings", + modelProperties: { + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + } + } + } +}; + +export const SapTablePartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SapTablePartitionSettings", + modelProperties: { + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + }, + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + serializedName: "partitionLowerBound", + type: { + name: "any" + } + }, + maxPartitionsNumber: { + serializedName: "maxPartitionsNumber", + type: { + name: "any" + } + } + } + } +}; + +export const StoredProcedureParameter: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "StoredProcedureParameter", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "any" + } + }, + type: { + serializedName: "type", + type: { + name: "String" + } + } + } + } +}; + +export const OraclePartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "OraclePartitionSettings", + modelProperties: { + partitionNames: { + serializedName: "partitionNames", + type: { + name: "any" + } + }, + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + }, + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + serializedName: "partitionLowerBound", + type: { + name: "any" + } + } + } + } +}; + +export const TeradataPartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TeradataPartitionSettings", + modelProperties: { + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + }, + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + serializedName: "partitionLowerBound", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbCursorMethodsProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "MongoDbCursorMethodsProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + project: { + serializedName: "project", + type: { + name: "any" + } + }, + sort: { + serializedName: "sort", + type: { + name: "any" + } + }, + skip: { + serializedName: "skip", + type: { + name: "any" + } + }, + limit: { + serializedName: "limit", + type: { + name: "any" + } + } + } + } +}; + +export const NetezzaPartitionSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: 
"NetezzaPartitionSettings", + modelProperties: { + partitionColumnName: { + serializedName: "partitionColumnName", + type: { + name: "any" + } + }, + partitionUpperBound: { + serializedName: "partitionUpperBound", + type: { + name: "any" + } + }, + partitionLowerBound: { + serializedName: "partitionLowerBound", + type: { + name: "any" + } + } + } + } +}; + +export const RedshiftUnloadSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RedshiftUnloadSettings", + modelProperties: { + s3LinkedServiceName: { + serializedName: "s3LinkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + bucketName: { + serializedName: "bucketName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const PolybaseSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PolybaseSettings", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + rejectType: { + serializedName: "rejectType", + type: { + name: "String" + } + }, + rejectValue: { + serializedName: "rejectValue", + type: { + name: "any" + } + }, + rejectSampleValue: { + serializedName: "rejectSampleValue", + type: { + name: "any" + } + }, + useTypeDefault: { + serializedName: "useTypeDefault", + type: { + name: "any" + } + } + } + } +}; + +export const DWCopyCommandSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DWCopyCommandSettings", + modelProperties: { + defaultValues: { + serializedName: "defaultValues", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DWCopyCommandDefaultValue" + } + } + } + }, + additionalOptions: { + serializedName: "additionalOptions", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; + +export const DWCopyCommandDefaultValue: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DWCopyCommandDefaultValue", + modelProperties: { + columnName: { + serializedName: "columnName", + type: { + name: "any" + } + }, + defaultValue: { + serializedName: "defaultValue", + type: { + name: "any" + } + } + } + } +}; + +export const LogStorageSettings: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LogStorageSettings", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + path: { + serializedName: "path", + type: { + name: "any" + } + } + } + } +}; + +export const CopyTranslator: coreHttp.CompositeMapper = { + serializedName: "CopyTranslator", + type: { + name: "Composite", + className: "CopyTranslator", + uberParent: "CopyTranslator", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const SsisPackageLocation: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisPackageLocation", + modelProperties: { + packagePath: { + serializedName: "packagePath", + type: { + name: "any" + } + }, + type: { + serializedName: "type", + type: { + name: "String" + } + }, + packagePassword: { + serializedName: "typeProperties.packagePassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessCredential: { + serializedName: 
"typeProperties.accessCredential", + type: { + name: "Composite", + className: "SsisAccessCredential" + } + }, + configurationPath: { + serializedName: "typeProperties.configurationPath", + type: { + name: "any" + } + }, + packageName: { + serializedName: "typeProperties.packageName", + type: { + name: "String" + } + }, + packageContent: { + serializedName: "typeProperties.packageContent", + type: { + name: "any" + } + }, + packageLastModifiedDate: { + serializedName: "typeProperties.packageLastModifiedDate", + type: { + name: "String" + } + }, + childPackages: { + serializedName: "typeProperties.childPackages", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SsisChildPackage" + } + } + } + } + } + } +}; + +export const SsisAccessCredential: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisAccessCredential", + modelProperties: { + domain: { + serializedName: "domain", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "userName", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecretBase" + } + } + } + } +}; + +export const SsisChildPackage: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisChildPackage", + modelProperties: { + packagePath: { + serializedName: "packagePath", + required: true, + type: { + name: "any" + } + }, + packageName: { + serializedName: "packageName", + type: { + name: "String" + } + }, + packageContent: { + serializedName: "packageContent", + required: true, + type: { + name: "any" + } + }, + packageLastModifiedDate: { + serializedName: "packageLastModifiedDate", + type: { + name: "String" + } + } + } + } +}; + +export const SsisExecutionCredential: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisExecutionCredential", + modelProperties: { + domain: { + serializedName: "domain", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "userName", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecureString" + } + } + } + } +}; + +export const SsisExecutionParameter: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisExecutionParameter", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const SsisPropertyOverride: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisPropertyOverride", + modelProperties: { + value: { + serializedName: "value", + required: true, + type: { + name: "any" + } + }, + isSensitive: { + serializedName: "isSensitive", + type: { + name: "Boolean" + } + } + } + } +}; + +export const SsisLogLocation: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SsisLogLocation", + modelProperties: { + logPath: { + serializedName: "logPath", + required: true, + type: { + name: "any" + } + }, + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + accessCredential: { + serializedName: "typeProperties.accessCredential", + type: { + name: "Composite", + className: "SsisAccessCredential" + } + }, + logRefreshInterval: { + serializedName: "typeProperties.logRefreshInterval", + type: { + name: "any" + } + } + } + } +}; + +export const CustomActivityReferenceObject: coreHttp.CompositeMapper = { + type: { + name: 
"Composite", + className: "CustomActivityReferenceObject", + modelProperties: { + linkedServices: { + serializedName: "linkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + datasets: { + serializedName: "datasets", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } + } + } +}; + +export const WebActivityAuthentication: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "WebActivityAuthentication", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + pfx: { + serializedName: "pfx", + type: { + name: "Composite", + className: "SecretBase" + } + }, + username: { + serializedName: "username", + type: { + name: "String" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + resource: { + serializedName: "resource", + type: { + name: "String" + } + } + } + } +}; + +export const SwitchCase: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SwitchCase", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "String" + } + }, + activities: { + serializedName: "activities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const AzureMLWebServiceFile: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AzureMLWebServiceFile", + modelProperties: { + filePath: { + serializedName: "filePath", + required: true, + type: { + name: "any" + } + }, + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + } +}; + +export const ExecuteDataFlowActivityTypePropertiesCompute: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ExecuteDataFlowActivityTypePropertiesCompute", + modelProperties: { + computeType: { + serializedName: "computeType", + type: { + name: "String" + } + }, + coreCount: { + serializedName: "coreCount", + type: { + name: "Number" + } + } + } + } +}; + +export const ScheduleTriggerRecurrence: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ScheduleTriggerRecurrence", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + frequency: { + serializedName: "frequency", + type: { + name: "String" + } + }, + interval: { + serializedName: "interval", + type: { + name: "Number" + } + }, + startTime: { + serializedName: "startTime", + type: { + name: "DateTime" + } + }, + endTime: { + serializedName: "endTime", + type: { + name: "DateTime" + } + }, + timeZone: { + serializedName: "timeZone", + type: { + name: "String" + } + }, + schedule: { + serializedName: "schedule", + type: { + name: "Composite", + className: "RecurrenceSchedule" + } + } + } + } +}; + +export const RecurrenceSchedule: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RecurrenceSchedule", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + minutes: { + serializedName: "minutes", + type: { + name: "Sequence", + element: { + type: { + name: "Number" + } + } + } + }, + hours: { + serializedName: "hours", + type: { + name: "Sequence", + element: { + type: { + name: "Number" + } + } + } + }, + weekDays: { + serializedName: "weekDays", + type: { + name: "Sequence", + element: { + 
type: { + name: "Enum", + allowedValues: [ + "Sunday", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday" + ] + } + } + } + }, + monthDays: { + serializedName: "monthDays", + type: { + name: "Sequence", + element: { + type: { + name: "Number" + } + } + } + }, + monthlyOccurrences: { + serializedName: "monthlyOccurrences", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RecurrenceScheduleOccurrence" + } + } + } + } + } + } +}; + +export const RecurrenceScheduleOccurrence: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RecurrenceScheduleOccurrence", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + day: { + serializedName: "day", + type: { + name: "Enum", + allowedValues: [ + "Sunday", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday" + ] + } + }, + occurrence: { + serializedName: "occurrence", + type: { + name: "Number" + } + } + } + } +}; + +export const RetryPolicy: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RetryPolicy", + modelProperties: { + count: { + serializedName: "count", + type: { + name: "any" + } + }, + intervalInSeconds: { + constraints: { + InclusiveMaximum: 86400, + InclusiveMinimum: 30 + }, + serializedName: "intervalInSeconds", + type: { + name: "Number" + } + } + } + } +}; + +export const DependencyReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DependencyReference", + uberParent: "DependencyReference", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const TriggerReference: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerReference", + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const IntegrationRuntimeComputeProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeComputeProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + location: { + serializedName: "location", + type: { + name: "String" + } + }, + nodeSize: { + serializedName: "nodeSize", + type: { + name: "String" + } + }, + numberOfNodes: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "numberOfNodes", + type: { + name: "Number" + } + }, + maxParallelExecutionsPerNode: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxParallelExecutionsPerNode", + type: { + name: "Number" + } + }, + dataFlowProperties: { + serializedName: "dataFlowProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeDataFlowProperties" + } + }, + vNetProperties: { + serializedName: "vNetProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeVNetProperties" + } + } + } + } +}; + +export const IntegrationRuntimeDataFlowProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeDataFlowProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + computeType: { + serializedName: "computeType", + type: { + name: "String" + } + }, + coreCount: { + serializedName: "coreCount", + type: { + name: "Number" + } + }, + 
timeToLive: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeToLive", + type: { + name: "Number" + } + } + } + } +}; + +export const IntegrationRuntimeVNetProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeVNetProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + vNetId: { + serializedName: "vNetId", + type: { + name: "String" + } + }, + subnet: { + serializedName: "subnet", + type: { + name: "String" + } + }, + publicIPs: { + serializedName: "publicIPs", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const IntegrationRuntimeSsisProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeSsisProperties", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + catalogInfo: { + serializedName: "catalogInfo", + type: { + name: "Composite", + className: "IntegrationRuntimeSsisCatalogInfo" + } + }, + licenseType: { + serializedName: "licenseType", + type: { + name: "String" + } + }, + customSetupScriptProperties: { + serializedName: "customSetupScriptProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeCustomSetupScriptProperties" + } + }, + dataProxyProperties: { + serializedName: "dataProxyProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeDataProxyProperties" + } + }, + edition: { + serializedName: "edition", + type: { + name: "String" + } + }, + expressCustomSetupProperties: { + serializedName: "expressCustomSetupProperties", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CustomSetupBase" + } + } + } + } + } + } +}; + +export const IntegrationRuntimeSsisCatalogInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeSsisCatalogInfo", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + catalogServerEndpoint: { + serializedName: "catalogServerEndpoint", + type: { + name: "String" + } + }, + catalogAdminUserName: { + constraints: { + MaxLength: 128, + MinLength: 1 + }, + serializedName: "catalogAdminUserName", + type: { + name: "String" + } + }, + catalogAdminPassword: { + serializedName: "catalogAdminPassword", + type: { + name: "Composite", + className: "SecureString" + } + }, + catalogPricingTier: { + serializedName: "catalogPricingTier", + type: { + name: "String" + } + } + } + } +}; + +export const IntegrationRuntimeCustomSetupScriptProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeCustomSetupScriptProperties", + modelProperties: { + blobContainerUri: { + serializedName: "blobContainerUri", + type: { + name: "String" + } + }, + sasToken: { + serializedName: "sasToken", + type: { + name: "Composite", + className: "SecureString" + } + } + } + } +}; + +export const IntegrationRuntimeDataProxyProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeDataProxyProperties", + modelProperties: { + connectVia: { + serializedName: "connectVia", + type: { + name: "Composite", + className: "EntityReference" + } + }, + stagingLinkedService: { + serializedName: "stagingLinkedService", + type: { + name: "Composite", + className: "EntityReference" + } + }, + path: { + serializedName: "path", + type: { + name: "String" + } + } + } + } +}; + +export const EntityReference: coreHttp.CompositeMapper = { + type: { + name: 
"Composite", + className: "EntityReference", + modelProperties: { + type: { + serializedName: "type", + type: { + name: "String" + } + }, + referenceName: { + serializedName: "referenceName", + type: { + name: "String" + } + } + } + } +}; + +export const CustomSetupBase: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "CustomSetupBase", + uberParent: "CustomSetupBase", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + type: { + serializedName: "type", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const LinkedIntegrationRuntimeType: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedIntegrationRuntimeType", + uberParent: "LinkedIntegrationRuntimeType", + polymorphicDiscriminator: { + serializedName: "authorizationType", + clientName: "authorizationType" + }, + modelProperties: { + authorizationType: { + serializedName: "authorizationType", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const AzureStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureStorage", + type: { + name: "Composite", + className: "AzureStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + sasUri: { + serializedName: "typeProperties.sasUri", + type: { + name: "any" + } + }, + sasToken: { + serializedName: "typeProperties.sasToken", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "String" + } + } + } + } +}; + +export const AzureBlobStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureBlobStorage", + type: { + name: "Composite", + className: "AzureBlobStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + sasUri: { + serializedName: "typeProperties.sasUri", + type: { + name: "any" + } + }, + sasToken: { + serializedName: "typeProperties.sasToken", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + serviceEndpoint: { + serializedName: "typeProperties.serviceEndpoint", + type: { + name: "String" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "String" + } + } + } + } +}; + 
+export const AzureTableStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureTableStorage", + type: { + name: "Composite", + className: "AzureTableStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + sasUri: { + serializedName: "typeProperties.sasUri", + type: { + name: "any" + } + }, + sasToken: { + serializedName: "typeProperties.sasToken", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "String" + } + } + } + } +}; + +export const AzureSqlDWLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureSqlDW", + type: { + name: "Composite", + className: "AzureSqlDWLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SqlServerLinkedService: coreHttp.CompositeMapper = { + serializedName: "SqlServer", + type: { + name: "Composite", + className: "SqlServerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlDatabaseLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureSqlDatabase", + type: { + name: "Composite", + className: "AzureSqlDatabaseLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, 
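// (editorial note, not part of the generated source: serializedName values such as
// "typeProperties.connectionString" are dotted paths; the serializer un-flattens
// them, so the flat client-side model round-trips to the nested
// { typeProperties: { connectionString: ... } } wire shape)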
+ password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlMILinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureSqlMI", + type: { + name: "Composite", + className: "AzureSqlMILinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBatchLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureBatch", + type: { + name: "Composite", + className: "AzureBatchLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + accountName: { + serializedName: "typeProperties.accountName", + required: true, + type: { + name: "any" + } + }, + accessKey: { + serializedName: "typeProperties.accessKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + batchUri: { + serializedName: "typeProperties.batchUri", + required: true, + type: { + name: "any" + } + }, + poolName: { + serializedName: "typeProperties.poolName", + required: true, + type: { + name: "any" + } + }, + linkedServiceName: { + serializedName: "typeProperties.linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureKeyVaultLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureKeyVault", + type: { + name: "Composite", + className: "AzureKeyVaultLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + baseUrl: { + serializedName: "typeProperties.baseUrl", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbLinkedService: coreHttp.CompositeMapper = { + serializedName: "CosmosDb", + type: { + name: "Composite", + className: 
"CosmosDbLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + accountEndpoint: { + serializedName: "typeProperties.accountEndpoint", + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsLinkedService: coreHttp.CompositeMapper = { + serializedName: "Dynamics", + type: { + name: "Composite", + className: "DynamicsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + deploymentType: { + serializedName: "typeProperties.deploymentType", + required: true, + type: { + name: "String" + } + }, + hostName: { + serializedName: "typeProperties.hostName", + type: { + name: "String" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "String" + } + }, + serviceUri: { + serializedName: "typeProperties.serviceUri", + type: { + name: "String" + } + }, + organizationName: { + serializedName: "typeProperties.organizationName", + type: { + name: "String" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalCredentialType: { + serializedName: "typeProperties.servicePrincipalCredentialType", + type: { + name: "String" + } + }, + servicePrincipalCredential: { + serializedName: "typeProperties.servicePrincipalCredential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsCrmLinkedService: coreHttp.CompositeMapper = { + serializedName: "DynamicsCrm", + type: { + name: "Composite", + className: "DynamicsCrmLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + deploymentType: { + serializedName: "typeProperties.deploymentType", + required: true, + type: { + name: "String" + } + }, + hostName: { + serializedName: "typeProperties.hostName", + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + serviceUri: { + serializedName: "typeProperties.serviceUri", + type: { + name: "any" + } + }, + organizationName: { + serializedName: "typeProperties.organizationName", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: 
"typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalCredentialType: { + serializedName: "typeProperties.servicePrincipalCredentialType", + type: { + name: "String" + } + }, + servicePrincipalCredential: { + serializedName: "typeProperties.servicePrincipalCredential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const CommonDataServiceForAppsLinkedService: coreHttp.CompositeMapper = { + serializedName: "CommonDataServiceForApps", + type: { + name: "Composite", + className: "CommonDataServiceForAppsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + deploymentType: { + serializedName: "typeProperties.deploymentType", + required: true, + type: { + name: "String" + } + }, + hostName: { + serializedName: "typeProperties.hostName", + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + serviceUri: { + serializedName: "typeProperties.serviceUri", + type: { + name: "any" + } + }, + organizationName: { + serializedName: "typeProperties.organizationName", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalCredentialType: { + serializedName: "typeProperties.servicePrincipalCredentialType", + type: { + name: "String" + } + }, + servicePrincipalCredential: { + serializedName: "typeProperties.servicePrincipalCredential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HDInsightLinkedService: coreHttp.CompositeMapper = { + serializedName: "HDInsight", + type: { + name: "Composite", + className: "HDInsightLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clusterUri: { + serializedName: "typeProperties.clusterUri", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + linkedServiceName: { + serializedName: "typeProperties.linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + 
hcatalogLinkedServiceName: { + serializedName: "typeProperties.hcatalogLinkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + isEspEnabled: { + serializedName: "typeProperties.isEspEnabled", + type: { + name: "any" + } + }, + fileSystem: { + serializedName: "typeProperties.fileSystem", + type: { + name: "any" + } + } + } + } +}; + +export const FileServerLinkedService: coreHttp.CompositeMapper = { + serializedName: "FileServer", + type: { + name: "Composite", + className: "FileServerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + userId: { + serializedName: "typeProperties.userId", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureFileStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureFileStorage", + type: { + name: "Composite", + className: "AzureFileStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + userId: { + serializedName: "typeProperties.userId", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleCloudStorageLinkedService: coreHttp.CompositeMapper = { + serializedName: "GoogleCloudStorage", + type: { + name: "Composite", + className: "GoogleCloudStorageLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + accessKeyId: { + serializedName: "typeProperties.accessKeyId", + type: { + name: "any" + } + }, + secretAccessKey: { + serializedName: "typeProperties.secretAccessKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + serviceUrl: { + serializedName: "typeProperties.serviceUrl", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const OracleLinkedService: coreHttp.CompositeMapper = { + serializedName: "Oracle", + type: { + name: "Composite", + className: "OracleLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + 
}, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMySqlLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureMySql", + type: { + name: "Composite", + className: "AzureMySqlLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MySqlLinkedService: coreHttp.CompositeMapper = { + serializedName: "MySql", + type: { + name: "Composite", + className: "MySqlLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const PostgreSqlLinkedService: coreHttp.CompositeMapper = { + serializedName: "PostgreSql", + type: { + name: "Composite", + className: "PostgreSqlLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SybaseLinkedService: coreHttp.CompositeMapper = { + serializedName: "Sybase", + type: { + name: "Composite", + className: "SybaseLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + }, + schema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: 
"typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const Db2LinkedService: coreHttp.CompositeMapper = { + serializedName: "Db2", + type: { + name: "Composite", + className: "Db2LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + packageCollection: { + serializedName: "typeProperties.packageCollection", + type: { + name: "any" + } + }, + certificateCommonName: { + serializedName: "typeProperties.certificateCommonName", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const TeradataLinkedService: coreHttp.CompositeMapper = { + serializedName: "Teradata", + type: { + name: "Composite", + className: "TeradataLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + server: { + serializedName: "typeProperties.server", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMLLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureML", + type: { + name: "Composite", + className: "AzureMLLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + mlEndpoint: { + serializedName: "typeProperties.mlEndpoint", + required: true, + type: { + name: "any" + } + }, + apiKey: { + serializedName: "typeProperties.apiKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + updateResourceEndpoint: { + serializedName: "typeProperties.updateResourceEndpoint", + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + 
serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMLServiceLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureMLService", + type: { + name: "Composite", + className: "AzureMLServiceLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + subscriptionId: { + serializedName: "typeProperties.subscriptionId", + required: true, + type: { + name: "any" + } + }, + resourceGroupName: { + serializedName: "typeProperties.resourceGroupName", + required: true, + type: { + name: "any" + } + }, + mlWorkspaceName: { + serializedName: "typeProperties.mlWorkspaceName", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const OdbcLinkedService: coreHttp.CompositeMapper = { + serializedName: "Odbc", + type: { + name: "Composite", + className: "OdbcLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + credential: { + serializedName: "typeProperties.credential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const InformixLinkedService: coreHttp.CompositeMapper = { + serializedName: "Informix", + type: { + name: "Composite", + className: "InformixLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + credential: { + serializedName: "typeProperties.credential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: 
"typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MicrosoftAccessLinkedService: coreHttp.CompositeMapper = { + serializedName: "MicrosoftAccess", + type: { + name: "Composite", + className: "MicrosoftAccessLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + credential: { + serializedName: "typeProperties.credential", + type: { + name: "Composite", + className: "SecretBase" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HdfsLinkedService: coreHttp.CompositeMapper = { + serializedName: "Hdfs", + type: { + name: "Composite", + className: "HdfsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + } + } + } +}; + +export const ODataLinkedService: coreHttp.CompositeMapper = { + serializedName: "OData", + type: { + name: "Composite", + className: "ODataLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + aadResourceId: { + serializedName: "typeProperties.aadResourceId", + type: { + name: "any" + } + }, + aadServicePrincipalCredentialType: { + serializedName: "typeProperties.aadServicePrincipalCredentialType", + type: { + name: "String" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + 
servicePrincipalEmbeddedCert: { + serializedName: "typeProperties.servicePrincipalEmbeddedCert", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalEmbeddedCertPassword: { + serializedName: "typeProperties.servicePrincipalEmbeddedCertPassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const WebLinkedService: coreHttp.CompositeMapper = { + serializedName: "Web", + type: { + name: "Composite", + className: "WebLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + typeProperties: { + serializedName: "typeProperties", + type: { + name: "Composite", + className: "WebLinkedServiceTypeProperties" + } + } + } + } +}; + +export const CassandraLinkedService: coreHttp.CompositeMapper = { + serializedName: "Cassandra", + type: { + name: "Composite", + className: "CassandraLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbLinkedService: coreHttp.CompositeMapper = { + serializedName: "MongoDb", + type: { + name: "Composite", + className: "MongoDbLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + databaseName: { + serializedName: "typeProperties.databaseName", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + authSource: { + serializedName: "typeProperties.authSource", + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbV2LinkedService: coreHttp.CompositeMapper 
= { + serializedName: "MongoDbV2", + type: { + name: "Composite", + className: "MongoDbV2LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiLinkedService: coreHttp.CompositeMapper = { + serializedName: "CosmosDbMongoDbApi", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + required: true, + type: { + name: "any" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStore", + type: { + name: "Composite", + className: "AzureDataLakeStoreLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + dataLakeStoreUri: { + serializedName: "typeProperties.dataLakeStoreUri", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + accountName: { + serializedName: "typeProperties.accountName", + type: { + name: "any" + } + }, + subscriptionId: { + serializedName: "typeProperties.subscriptionId", + type: { + name: "any" + } + }, + resourceGroupName: { + serializedName: "typeProperties.resourceGroupName", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFS", + type: { + name: "Composite", + className: "AzureBlobFSLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + accountKey: { + serializedName: "typeProperties.accountKey", + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: 
"typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const Office365LinkedService: coreHttp.CompositeMapper = { + serializedName: "Office365", + type: { + name: "Composite", + className: "Office365LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + office365TenantId: { + serializedName: "typeProperties.office365TenantId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalTenantId: { + serializedName: "typeProperties.servicePrincipalTenantId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceLinkedService: coreHttp.CompositeMapper = { + serializedName: "Salesforce", + type: { + name: "Composite", + className: "SalesforceLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + environmentUrl: { + serializedName: "typeProperties.environmentUrl", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + securityToken: { + serializedName: "typeProperties.securityToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceServiceCloudLinkedService: coreHttp.CompositeMapper = { + serializedName: "SalesforceServiceCloud", + type: { + name: "Composite", + className: "SalesforceServiceCloudLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + environmentUrl: { + serializedName: "typeProperties.environmentUrl", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + securityToken: { + serializedName: "typeProperties.securityToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + extendedProperties: { + serializedName: "typeProperties.extendedProperties", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SapCloudForCustomerLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapCloudForCustomer", + type: { + name: "Composite", + className: "SapCloudForCustomerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SapEccLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapEcc", + type: { + name: "Composite", + className: "SapEccLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "String" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "String" + } + } + } + } +}; + +export const SapOpenHubLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapOpenHub", + type: { + name: "Composite", + className: "SapOpenHubLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + systemNumber: { + serializedName: "typeProperties.systemNumber", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + language: { + serializedName: "typeProperties.language", + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const RestServiceLinkedService: coreHttp.CompositeMapper = { + serializedName: "RestService", + type: { + name: "Composite", + className: "RestServiceLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + enableServerCertificateValidation: { + serializedName: "typeProperties.enableServerCertificateValidation", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + servicePrincipalId: { 
+ serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + type: { + name: "any" + } + }, + aadResourceId: { + serializedName: "typeProperties.aadResourceId", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonS3LinkedService: coreHttp.CompositeMapper = { + serializedName: "AmazonS3", + type: { + name: "Composite", + className: "AmazonS3LinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + accessKeyId: { + serializedName: "typeProperties.accessKeyId", + type: { + name: "any" + } + }, + secretAccessKey: { + serializedName: "typeProperties.secretAccessKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + serviceUrl: { + serializedName: "typeProperties.serviceUrl", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonRedshiftLinkedService: coreHttp.CompositeMapper = { + serializedName: "AmazonRedshift", + type: { + name: "Composite", + className: "AmazonRedshiftLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const CustomDataSourceLinkedService: coreHttp.CompositeMapper = { + serializedName: "CustomDataSource", + type: { + name: "Composite", + className: "CustomDataSourceLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + typeProperties: { + serializedName: "typeProperties", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureSearchLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureSearch", + type: { + name: "Composite", + className: "AzureSearchLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + key: { + serializedName: "typeProperties.key", + type: { + name: "Composite", + 
className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HttpLinkedService: coreHttp.CompositeMapper = { + serializedName: "HttpServer", + type: { + name: "Composite", + className: "HttpLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + embeddedCertData: { + serializedName: "typeProperties.embeddedCertData", + type: { + name: "any" + } + }, + certThumbprint: { + serializedName: "typeProperties.certThumbprint", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + enableServerCertificateValidation: { + serializedName: "typeProperties.enableServerCertificateValidation", + type: { + name: "any" + } + } + } + } +}; + +export const FtpServerLinkedService: coreHttp.CompositeMapper = { + serializedName: "FtpServer", + type: { + name: "Composite", + className: "FtpServerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + enableServerCertificateValidation: { + serializedName: "typeProperties.enableServerCertificateValidation", + type: { + name: "any" + } + } + } + } +}; + +export const SftpServerLinkedService: coreHttp.CompositeMapper = { + serializedName: "Sftp", + type: { + name: "Composite", + className: "SftpServerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: 
"typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + privateKeyPath: { + serializedName: "typeProperties.privateKeyPath", + type: { + name: "any" + } + }, + privateKeyContent: { + serializedName: "typeProperties.privateKeyContent", + type: { + name: "Composite", + className: "SecretBase" + } + }, + passPhrase: { + serializedName: "typeProperties.passPhrase", + type: { + name: "Composite", + className: "SecretBase" + } + }, + skipHostKeyValidation: { + serializedName: "typeProperties.skipHostKeyValidation", + type: { + name: "any" + } + }, + hostKeyFingerprint: { + serializedName: "typeProperties.hostKeyFingerprint", + type: { + name: "any" + } + } + } + } +}; + +export const SapBWLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapBW", + type: { + name: "Composite", + className: "SapBWLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + systemNumber: { + serializedName: "typeProperties.systemNumber", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SapHanaLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapHana", + type: { + name: "Composite", + className: "SapHanaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + server: { + serializedName: "typeProperties.server", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + type: { + name: "String" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonMWSLinkedService: coreHttp.CompositeMapper = { + serializedName: "AmazonMWS", + type: { + name: "Composite", + className: "AmazonMWSLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + marketplaceID: { + serializedName: "typeProperties.marketplaceID", + required: true, + type: { + name: "any" + } + }, + 
sellerID: { + serializedName: "typeProperties.sellerID", + required: true, + type: { + name: "any" + } + }, + mwsAuthToken: { + serializedName: "typeProperties.mwsAuthToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessKeyId: { + serializedName: "typeProperties.accessKeyId", + required: true, + type: { + name: "any" + } + }, + secretKey: { + serializedName: "typeProperties.secretKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzurePostgreSqlLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzurePostgreSql", + type: { + name: "Composite", + className: "AzurePostgreSqlLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ConcurLinkedService: coreHttp.CompositeMapper = { + serializedName: "Concur", + type: { + name: "Composite", + className: "ConcurLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const CouchbaseLinkedService: coreHttp.CompositeMapper = { + serializedName: "Couchbase", + type: { + name: "Composite", + className: "CouchbaseLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + credString: { + serializedName: "typeProperties.credString", + type: { + name: "Composite", + className: 
"AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const DrillLinkedService: coreHttp.CompositeMapper = { + serializedName: "Drill", + type: { + name: "Composite", + className: "DrillLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const EloquaLinkedService: coreHttp.CompositeMapper = { + serializedName: "Eloqua", + type: { + name: "Composite", + className: "EloquaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleBigQueryLinkedService: coreHttp.CompositeMapper = { + serializedName: "GoogleBigQuery", + type: { + name: "Composite", + className: "GoogleBigQueryLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + project: { + serializedName: "typeProperties.project", + required: true, + type: { + name: "any" + } + }, + additionalProjects: { + serializedName: "typeProperties.additionalProjects", + type: { + name: "any" + } + }, + requestGoogleDriveScope: { + serializedName: "typeProperties.requestGoogleDriveScope", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + refreshToken: { + serializedName: "typeProperties.refreshToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + email: { + serializedName: "typeProperties.email", + type: { + name: "any" + } + }, + keyFilePath: { + serializedName: "typeProperties.keyFilePath", + type: { + name: "any" + } + }, + trustedCertPath: { + 
serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const GreenplumLinkedService: coreHttp.CompositeMapper = { + serializedName: "Greenplum", + type: { + name: "Composite", + className: "GreenplumLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HBaseLinkedService: coreHttp.CompositeMapper = { + serializedName: "HBase", + type: { + name: "Composite", + className: "HBaseLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HiveLinkedService: coreHttp.CompositeMapper = { + serializedName: "Hive", + type: { + name: "Composite", + className: "HiveLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + serverType: { + serializedName: "typeProperties.serverType", + type: { + name: "String" + } + }, + thriftTransportProtocol: { + serializedName: "typeProperties.thriftTransportProtocol", + type: { + name: "String" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + 
name: "String" + } + }, + serviceDiscoveryMode: { + serializedName: "typeProperties.serviceDiscoveryMode", + type: { + name: "any" + } + }, + zooKeeperNameSpace: { + serializedName: "typeProperties.zooKeeperNameSpace", + type: { + name: "any" + } + }, + useNativeQuery: { + serializedName: "typeProperties.useNativeQuery", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HubspotLinkedService: coreHttp.CompositeMapper = { + serializedName: "Hubspot", + type: { + name: "Composite", + className: "HubspotLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + refreshToken: { + serializedName: "typeProperties.refreshToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ImpalaLinkedService: coreHttp.CompositeMapper = { + serializedName: "Impala", + type: { + name: "Composite", + className: "ImpalaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: 
"typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const JiraLinkedService: coreHttp.CompositeMapper = { + serializedName: "Jira", + type: { + name: "Composite", + className: "JiraLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MagentoLinkedService: coreHttp.CompositeMapper = { + serializedName: "Magento", + type: { + name: "Composite", + className: "MagentoLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MariaDBLinkedService: coreHttp.CompositeMapper = { + serializedName: "MariaDB", + type: { + name: "Composite", + className: "MariaDBLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + 
serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMariaDBLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureMariaDB", + type: { + name: "Composite", + className: "AzureMariaDBLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const MarketoLinkedService: coreHttp.CompositeMapper = { + serializedName: "Marketo", + type: { + name: "Composite", + className: "MarketoLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const PaypalLinkedService: coreHttp.CompositeMapper = { + serializedName: "Paypal", + type: { + name: "Composite", + className: "PaypalLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const PhoenixLinkedService: 
coreHttp.CompositeMapper = { + serializedName: "Phoenix", + type: { + name: "Composite", + className: "PhoenixLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const PrestoLinkedService: coreHttp.CompositeMapper = { + serializedName: "Presto", + type: { + name: "Composite", + className: "PrestoLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + serverVersion: { + serializedName: "typeProperties.serverVersion", + required: true, + type: { + name: "any" + } + }, + catalog: { + serializedName: "typeProperties.catalog", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + timeZoneID: { + serializedName: "typeProperties.timeZoneID", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { 
+ name: "any" + } + } + } + } +}; + +export const QuickBooksLinkedService: coreHttp.CompositeMapper = { + serializedName: "QuickBooks", + type: { + name: "Composite", + className: "QuickBooksLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + companyId: { + serializedName: "typeProperties.companyId", + required: true, + type: { + name: "any" + } + }, + consumerKey: { + serializedName: "typeProperties.consumerKey", + required: true, + type: { + name: "any" + } + }, + consumerSecret: { + serializedName: "typeProperties.consumerSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + accessTokenSecret: { + serializedName: "typeProperties.accessTokenSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ServiceNowLinkedService: coreHttp.CompositeMapper = { + serializedName: "ServiceNow", + type: { + name: "Composite", + className: "ServiceNowLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ShopifyLinkedService: coreHttp.CompositeMapper = { + serializedName: "Shopify", + type: { + name: "Composite", + className: "ShopifyLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + 
className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SparkLinkedService: coreHttp.CompositeMapper = { + serializedName: "Spark", + type: { + name: "Composite", + className: "SparkLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + port: { + serializedName: "typeProperties.port", + required: true, + type: { + name: "any" + } + }, + serverType: { + serializedName: "typeProperties.serverType", + type: { + name: "String" + } + }, + thriftTransportProtocol: { + serializedName: "typeProperties.thriftTransportProtocol", + type: { + name: "String" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + username: { + serializedName: "typeProperties.username", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + httpPath: { + serializedName: "typeProperties.httpPath", + type: { + name: "any" + } + }, + enableSsl: { + serializedName: "typeProperties.enableSsl", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + allowHostNameCNMismatch: { + serializedName: "typeProperties.allowHostNameCNMismatch", + type: { + name: "any" + } + }, + allowSelfSignedServerCert: { + serializedName: "typeProperties.allowSelfSignedServerCert", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SquareLinkedService: coreHttp.CompositeMapper = { + serializedName: "Square", + type: { + name: "Composite", + className: "SquareLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + redirectUri: { + serializedName: "typeProperties.redirectUri", + required: true, + type: { + name: "any" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + 
serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const XeroLinkedService: coreHttp.CompositeMapper = { + serializedName: "Xero", + type: { + name: "Composite", + className: "XeroLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + consumerKey: { + serializedName: "typeProperties.consumerKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + privateKey: { + serializedName: "typeProperties.privateKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ZohoLinkedService: coreHttp.CompositeMapper = { + serializedName: "Zoho", + type: { + name: "Composite", + className: "ZohoLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const VerticaLinkedService: coreHttp.CompositeMapper = { + serializedName: "Vertica", + type: { + name: "Composite", + className: "VerticaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const NetezzaLinkedService: coreHttp.CompositeMapper = { + serializedName: "Netezza", + type: { + name: "Composite", + className: "NetezzaLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + connectionString: { + serializedName: "typeProperties.connectionString", + type: { + name: "any" + } + }, + pwd: { + serializedName: "typeProperties.pwd", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceMarketingCloudLinkedService: coreHttp.CompositeMapper = { + serializedName: "SalesforceMarketingCloud", + type: { + name: "Composite", + className: "SalesforceMarketingCloudLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const HDInsightOnDemandLinkedService: coreHttp.CompositeMapper = { + serializedName: "HDInsightOnDemand", + type: { + name: "Composite", + className: "HDInsightOnDemandLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clusterSize: { + serializedName: "typeProperties.clusterSize", + required: true, + type: { + name: "any" + } + }, + timeToLive: { + serializedName: "typeProperties.timeToLive", + required: true, + type: { + name: "any" + } + }, + version: { + serializedName: "typeProperties.version", + required: true, + type: { + name: "any" + } + }, + linkedServiceName: { + serializedName: "typeProperties.linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + hostSubscriptionId: { + serializedName: "typeProperties.hostSubscriptionId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, + type: { + name: "any" + } + }, + clusterResourceGroup: { + serializedName: "typeProperties.clusterResourceGroup", + required: true, + type: { + name: "any" + } + }, + clusterNamePrefix: { + serializedName: "typeProperties.clusterNamePrefix", + type: { + name: "any" + } + }, + clusterUserName: { + serializedName: "typeProperties.clusterUserName", + type: { + name: "any" + } + }, + clusterPassword: { + serializedName: "typeProperties.clusterPassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clusterSshUserName: { + serializedName: 
"typeProperties.clusterSshUserName", + type: { + name: "any" + } + }, + clusterSshPassword: { + serializedName: "typeProperties.clusterSshPassword", + type: { + name: "Composite", + className: "SecretBase" + } + }, + additionalLinkedServiceNames: { + serializedName: "typeProperties.additionalLinkedServiceNames", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + hcatalogLinkedServiceName: { + serializedName: "typeProperties.hcatalogLinkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + clusterType: { + serializedName: "typeProperties.clusterType", + type: { + name: "any" + } + }, + sparkVersion: { + serializedName: "typeProperties.sparkVersion", + type: { + name: "any" + } + }, + coreConfiguration: { + serializedName: "typeProperties.coreConfiguration", + type: { + name: "any" + } + }, + hBaseConfiguration: { + serializedName: "typeProperties.hBaseConfiguration", + type: { + name: "any" + } + }, + hdfsConfiguration: { + serializedName: "typeProperties.hdfsConfiguration", + type: { + name: "any" + } + }, + hiveConfiguration: { + serializedName: "typeProperties.hiveConfiguration", + type: { + name: "any" + } + }, + mapReduceConfiguration: { + serializedName: "typeProperties.mapReduceConfiguration", + type: { + name: "any" + } + }, + oozieConfiguration: { + serializedName: "typeProperties.oozieConfiguration", + type: { + name: "any" + } + }, + stormConfiguration: { + serializedName: "typeProperties.stormConfiguration", + type: { + name: "any" + } + }, + yarnConfiguration: { + serializedName: "typeProperties.yarnConfiguration", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + }, + headNodeSize: { + serializedName: "typeProperties.headNodeSize", + type: { + name: "any" + } + }, + dataNodeSize: { + serializedName: "typeProperties.dataNodeSize", + type: { + name: "any" + } + }, + zookeeperNodeSize: { + serializedName: "typeProperties.zookeeperNodeSize", + type: { + name: "any" + } + }, + scriptActions: { + serializedName: "typeProperties.scriptActions", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ScriptAction" + } + } + } + }, + virtualNetworkId: { + serializedName: "typeProperties.virtualNetworkId", + type: { + name: "any" + } + }, + subnetName: { + serializedName: "typeProperties.subnetName", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeAnalyticsLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeAnalytics", + type: { + name: "Composite", + className: "AzureDataLakeAnalyticsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + accountName: { + serializedName: "typeProperties.accountName", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, + type: { + name: "any" + } + }, + subscriptionId: { + serializedName: "typeProperties.subscriptionId", + type: { + name: "any" + } + }, + 
resourceGroupName: { + serializedName: "typeProperties.resourceGroupName", + type: { + name: "any" + } + }, + dataLakeAnalyticsUri: { + serializedName: "typeProperties.dataLakeAnalyticsUri", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDatabricksLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureDatabricks", + type: { + name: "Composite", + className: "AzureDatabricksLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + domain: { + serializedName: "typeProperties.domain", + required: true, + type: { + name: "any" + } + }, + accessToken: { + serializedName: "typeProperties.accessToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + existingClusterId: { + serializedName: "typeProperties.existingClusterId", + type: { + name: "any" + } + }, + instancePoolId: { + serializedName: "typeProperties.instancePoolId", + type: { + name: "any" + } + }, + newClusterVersion: { + serializedName: "typeProperties.newClusterVersion", + type: { + name: "any" + } + }, + newClusterNumOfWorker: { + serializedName: "typeProperties.newClusterNumOfWorker", + type: { + name: "any" + } + }, + newClusterNodeType: { + serializedName: "typeProperties.newClusterNodeType", + type: { + name: "any" + } + }, + newClusterSparkConf: { + serializedName: "typeProperties.newClusterSparkConf", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + newClusterSparkEnvVars: { + serializedName: "typeProperties.newClusterSparkEnvVars", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + newClusterCustomTags: { + serializedName: "typeProperties.newClusterCustomTags", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + newClusterDriverNodeType: { + serializedName: "typeProperties.newClusterDriverNodeType", + type: { + name: "any" + } + }, + newClusterInitScripts: { + serializedName: "typeProperties.newClusterInitScripts", + type: { + name: "any" + } + }, + newClusterEnableElasticDisk: { + serializedName: "typeProperties.newClusterEnableElasticDisk", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const ResponsysLinkedService: coreHttp.CompositeMapper = { + serializedName: "Responsys", + type: { + name: "Composite", + className: "ResponsysLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + required: true, + type: { + name: "any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: 
"typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsAXLinkedService: coreHttp.CompositeMapper = { + serializedName: "DynamicsAX", + type: { + name: "Composite", + className: "DynamicsAXLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, + type: { + name: "any" + } + }, + aadResourceId: { + serializedName: "typeProperties.aadResourceId", + required: true, + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const OracleServiceCloudLinkedService: coreHttp.CompositeMapper = { + serializedName: "OracleServiceCloud", + type: { + name: "Composite", + className: "OracleServiceCloudLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + host: { + serializedName: "typeProperties.host", + required: true, + type: { + name: "any" + } + }, + username: { + serializedName: "typeProperties.username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + useEncryptedEndpoints: { + serializedName: "typeProperties.useEncryptedEndpoints", + type: { + name: "any" + } + }, + useHostVerification: { + serializedName: "typeProperties.useHostVerification", + type: { + name: "any" + } + }, + usePeerVerification: { + serializedName: "typeProperties.usePeerVerification", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleAdWordsLinkedService: coreHttp.CompositeMapper = { + serializedName: "GoogleAdWords", + type: { + name: "Composite", + className: "GoogleAdWordsLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + clientCustomerID: { + serializedName: "typeProperties.clientCustomerID", + required: true, + type: { + name: "any" + } + }, + developerToken: { + serializedName: "typeProperties.developerToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + authenticationType: { + serializedName: "typeProperties.authenticationType", + required: true, + type: { + name: "String" + } + }, + refreshToken: { + serializedName: "typeProperties.refreshToken", + type: { + name: "Composite", + className: "SecretBase" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: 
"any" + } + }, + clientSecret: { + serializedName: "typeProperties.clientSecret", + type: { + name: "Composite", + className: "SecretBase" + } + }, + email: { + serializedName: "typeProperties.email", + type: { + name: "any" + } + }, + keyFilePath: { + serializedName: "typeProperties.keyFilePath", + type: { + name: "any" + } + }, + trustedCertPath: { + serializedName: "typeProperties.trustedCertPath", + type: { + name: "any" + } + }, + useSystemTrustStore: { + serializedName: "typeProperties.useSystemTrustStore", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const SapTableLinkedService: coreHttp.CompositeMapper = { + serializedName: "SapTable", + type: { + name: "Composite", + className: "SapTableLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + server: { + serializedName: "typeProperties.server", + type: { + name: "any" + } + }, + systemNumber: { + serializedName: "typeProperties.systemNumber", + type: { + name: "any" + } + }, + clientId: { + serializedName: "typeProperties.clientId", + type: { + name: "any" + } + }, + language: { + serializedName: "typeProperties.language", + type: { + name: "any" + } + }, + systemId: { + serializedName: "typeProperties.systemId", + type: { + name: "any" + } + }, + userName: { + serializedName: "typeProperties.userName", + type: { + name: "any" + } + }, + password: { + serializedName: "typeProperties.password", + type: { + name: "Composite", + className: "SecretBase" + } + }, + messageServer: { + serializedName: "typeProperties.messageServer", + type: { + name: "any" + } + }, + messageServerService: { + serializedName: "typeProperties.messageServerService", + type: { + name: "any" + } + }, + sncMode: { + serializedName: "typeProperties.sncMode", + type: { + name: "any" + } + }, + sncMyName: { + serializedName: "typeProperties.sncMyName", + type: { + name: "any" + } + }, + sncPartnerName: { + serializedName: "typeProperties.sncPartnerName", + type: { + name: "any" + } + }, + sncLibraryPath: { + serializedName: "typeProperties.sncLibraryPath", + type: { + name: "any" + } + }, + sncQop: { + serializedName: "typeProperties.sncQop", + type: { + name: "any" + } + }, + logonGroup: { + serializedName: "typeProperties.logonGroup", + type: { + name: "any" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataExplorerLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorer", + type: { + name: "Composite", + className: "AzureDataExplorerLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + endpoint: { + serializedName: "typeProperties.endpoint", + required: true, + type: { + name: "any" + } + }, + servicePrincipalId: { + serializedName: "typeProperties.servicePrincipalId", + required: true, + type: { + name: "any" + } + }, + servicePrincipalKey: { + serializedName: "typeProperties.servicePrincipalKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + database: { + serializedName: "typeProperties.database", + required: true, + type: { + 
name: "any" + } + }, + tenant: { + serializedName: "typeProperties.tenant", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureFunctionLinkedService: coreHttp.CompositeMapper = { + serializedName: "AzureFunction", + type: { + name: "Composite", + className: "AzureFunctionLinkedService", + uberParent: "LinkedService", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedService.type.modelProperties, + functionAppUrl: { + serializedName: "typeProperties.functionAppUrl", + required: true, + type: { + name: "any" + } + }, + functionKey: { + serializedName: "typeProperties.functionKey", + type: { + name: "Composite", + className: "SecretBase" + } + }, + encryptedCredential: { + serializedName: "typeProperties.encryptedCredential", + type: { + name: "any" + } + } + } + } +}; + +export const AzureEntityResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "AzureEntityResource", + modelProperties: { + ...Resource.type.modelProperties, + etag: { + serializedName: "etag", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const PrivateEndpointConnection: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PrivateEndpointConnection", + modelProperties: { + ...Resource.type.modelProperties, + privateEndpoint: { + serializedName: "properties.privateEndpoint", + type: { + name: "Composite", + className: "PrivateEndpoint" + } + }, + privateLinkServiceConnectionState: { + serializedName: "properties.privateLinkServiceConnectionState", + type: { + name: "Composite", + className: "PrivateLinkServiceConnectionState" + } + }, + provisioningState: { + serializedName: "properties.provisioningState", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; + +export const TrackedResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TrackedResource", + modelProperties: { + ...Resource.type.modelProperties, + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + location: { + serializedName: "location", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const ProxyResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ProxyResource", + modelProperties: { + ...Resource.type.modelProperties + } + } +}; + +export const AvroDataset: coreHttp.CompositeMapper = { + serializedName: "Avro", + type: { + name: "Composite", + className: "AvroDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + avroCompressionCodec: { + serializedName: "typeProperties.avroCompressionCodec", + type: { + name: "String" + } + }, + avroCompressionLevel: { + constraints: { + InclusiveMaximum: 9, + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.avroCompressionLevel", + type: { + name: "Number" + } + } + } + } +}; + +export const ParquetDataset: coreHttp.CompositeMapper = { + serializedName: "Parquet", + type: { + name: "Composite", + className: "ParquetDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + compressionCodec: { + serializedName: "typeProperties.compressionCodec", + type: { + name: "String" + } + } + } + } +}; + +export const DelimitedTextDataset: coreHttp.CompositeMapper = { + serializedName: "DelimitedText", + type: { + name: "Composite", + className: "DelimitedTextDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + columnDelimiter: { + serializedName: "typeProperties.columnDelimiter", + type: { + name: "any" + } + }, + rowDelimiter: { + serializedName: "typeProperties.rowDelimiter", + type: { + name: "any" + } + }, + encodingName: { + serializedName: "typeProperties.encodingName", + type: { + name: "any" + } + }, + compressionCodec: { + serializedName: "typeProperties.compressionCodec", + type: { + name: "String" + } + }, + compressionLevel: { + serializedName: "typeProperties.compressionLevel", + type: { + name: "String" + } + }, + quoteChar: { + serializedName: "typeProperties.quoteChar", + type: { + name: "any" + } + }, + escapeChar: { + serializedName: "typeProperties.escapeChar", + type: { + name: "any" + } + }, + firstRowAsHeader: { + serializedName: "typeProperties.firstRowAsHeader", + type: { + name: "any" + } + }, + nullValue: { + serializedName: "typeProperties.nullValue", + type: { + name: "any" + } + } + } + } +}; + +export const JsonDataset: coreHttp.CompositeMapper = { + serializedName: "Json", + type: { + name: "Composite", + className: "JsonDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + encodingName: { + serializedName: "typeProperties.encodingName", + type: { + name: "any" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + +export const OrcDataset: coreHttp.CompositeMapper = { + serializedName: "Orc", + type: { + name: "Composite", + className: "OrcDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + className: "DatasetLocation" + } + }, + orcCompressionCodec: { + serializedName: "typeProperties.orcCompressionCodec", + type: { + name: "String" + } + } + } + } +}; + +export const BinaryDataset: coreHttp.CompositeMapper = { + serializedName: "Binary", + type: { + name: "Composite", + className: "BinaryDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + location: { + serializedName: "typeProperties.location", + type: { + name: "Composite", + 
className: "DatasetLocation" + } + }, + compression: { + serializedName: "typeProperties.compression", + type: { + name: "Composite", + className: "DatasetCompression" + } + } + } + } +}; + +export const AzureTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureTable", + type: { + name: "Composite", + className: "AzureTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureSqlTable", + type: { + name: "Composite", + className: "AzureSqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlMITableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureSqlMITable", + type: { + name: "Composite", + className: "AzureSqlMITableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlDWTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureSqlDWTable", + type: { + name: "Composite", + className: "AzureSqlDWTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const CassandraTableDataset: coreHttp.CompositeMapper = { + serializedName: "CassandraTable", + type: { + name: "Composite", + className: "CassandraTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + keyspace: { + serializedName: "typeProperties.keyspace", + type: { + name: "any" + } + } + } + } +}; + +export const CustomDataset: coreHttp.CompositeMapper = { + serializedName: "CustomDataset", + type: { + name: "Composite", + className: "CustomDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + typeProperties: { + serializedName: "typeProperties", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbSqlApiCollectionDataset: coreHttp.CompositeMapper = { + serializedName: "CosmosDbSqlApiCollection", + type: { + name: "Composite", + className: "CosmosDbSqlApiCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collectionName: { + serializedName: "typeProperties.collectionName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const DocumentDbCollectionDataset: coreHttp.CompositeMapper = { + serializedName: "DocumentDbCollection", + type: { + name: "Composite", + className: "DocumentDbCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collectionName: { + serializedName: "typeProperties.collectionName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsEntityDataset: coreHttp.CompositeMapper = { + serializedName: "DynamicsEntity", + type: { + name: "Composite", + className: "DynamicsEntityDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + entityName: { + serializedName: "typeProperties.entityName", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsCrmEntityDataset: coreHttp.CompositeMapper = { + serializedName: "DynamicsCrmEntity", + type: { + name: "Composite", + className: "DynamicsCrmEntityDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + entityName: { + serializedName: "typeProperties.entityName", + type: { + name: "any" + } + } + } + } +}; + +export const CommonDataServiceForAppsEntityDataset: coreHttp.CompositeMapper = { + serializedName: "CommonDataServiceForAppsEntity", + type: { + name: "Composite", + className: "CommonDataServiceForAppsEntityDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + entityName: { + serializedName: "typeProperties.entityName", + type: { + name: "any" + } + } + } + } +}; + +export const Office365Dataset: coreHttp.CompositeMapper = { + serializedName: "Office365Table", + type: { + name: "Composite", + className: "Office365Dataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + required: true, + type: { + name: "any" + } + }, + predicate: { + serializedName: "typeProperties.predicate", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbCollectionDataset: coreHttp.CompositeMapper = { + serializedName: "MongoDbCollection", + type: { + name: "Composite", + className: "MongoDbCollectionDataset", + uberParent: "Dataset", + 
additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collectionName: { + serializedName: "typeProperties.collectionName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbV2CollectionDataset: coreHttp.CompositeMapper = { + serializedName: "MongoDbV2Collection", + type: { + name: "Composite", + className: "MongoDbV2CollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collection: { + serializedName: "typeProperties.collection", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiCollectionDataset: coreHttp.CompositeMapper = { + serializedName: "CosmosDbMongoDbApiCollection", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiCollectionDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + collection: { + serializedName: "typeProperties.collection", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const ODataResourceDataset: coreHttp.CompositeMapper = { + serializedName: "ODataResource", + type: { + name: "Composite", + className: "ODataResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + type: { + name: "any" + } + } + } + } +}; + +export const OracleTableDataset: coreHttp.CompositeMapper = { + serializedName: "OracleTable", + type: { + name: "Composite", + className: "OracleTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const TeradataTableDataset: coreHttp.CompositeMapper = { + serializedName: "TeradataTable", + type: { + name: "Composite", + className: "TeradataTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + database: { + serializedName: "typeProperties.database", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMySqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureMySqlTable", + type: { + name: "Composite", + className: "AzureMySqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + 
serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonRedshiftTableDataset: coreHttp.CompositeMapper = { + serializedName: "AmazonRedshiftTable", + type: { + name: "Composite", + className: "AmazonRedshiftTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const Db2TableDataset: coreHttp.CompositeMapper = { + serializedName: "Db2Table", + type: { + name: "Composite", + className: "Db2TableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const RelationalTableDataset: coreHttp.CompositeMapper = { + serializedName: "RelationalTable", + type: { + name: "Composite", + className: "RelationalTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const InformixTableDataset: coreHttp.CompositeMapper = { + serializedName: "InformixTable", + type: { + name: "Composite", + className: "InformixTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const OdbcTableDataset: coreHttp.CompositeMapper = { + serializedName: "OdbcTable", + type: { + name: "Composite", + className: "OdbcTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const MySqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "MySqlTable", + type: { + name: "Composite", + className: "MySqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const PostgreSqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "PostgreSqlTable", + type: { + name: "Composite", + className: "PostgreSqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: 
{ name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const MicrosoftAccessTableDataset: coreHttp.CompositeMapper = { + serializedName: "MicrosoftAccessTable", + type: { + name: "Composite", + className: "MicrosoftAccessTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SalesforceObject", + type: { + name: "Composite", + className: "SalesforceObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + objectApiName: { + serializedName: "typeProperties.objectApiName", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceServiceCloudObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SalesforceServiceCloudObject", + type: { + name: "Composite", + className: "SalesforceServiceCloudObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + objectApiName: { + serializedName: "typeProperties.objectApiName", + type: { + name: "any" + } + } + } + } +}; + +export const SybaseTableDataset: coreHttp.CompositeMapper = { + serializedName: "SybaseTable", + type: { + name: "Composite", + className: "SybaseTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const SapBwCubeDataset: coreHttp.CompositeMapper = { + serializedName: "SapBwCube", + type: { + name: "Composite", + className: "SapBwCubeDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties + } + } +}; + +export const SapCloudForCustomerResourceDataset: coreHttp.CompositeMapper = { + serializedName: "SapCloudForCustomerResource", + type: { + name: "Composite", + className: "SapCloudForCustomerResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const SapEccResourceDataset: coreHttp.CompositeMapper = { + serializedName: "SapEccResource", + type: { + name: "Composite", + className: "SapEccResourceDataset", + uberParent: "Dataset", + 
additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const SapHanaTableDataset: coreHttp.CompositeMapper = { + serializedName: "SapHanaTable", + type: { + name: "Composite", + className: "SapHanaTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const SapOpenHubTableDataset: coreHttp.CompositeMapper = { + serializedName: "SapOpenHubTable", + type: { + name: "Composite", + className: "SapOpenHubTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + openHubDestinationName: { + serializedName: "typeProperties.openHubDestinationName", + required: true, + type: { + name: "any" + } + }, + excludeLastRequest: { + serializedName: "typeProperties.excludeLastRequest", + type: { + name: "any" + } + }, + baseRequestId: { + serializedName: "typeProperties.baseRequestId", + type: { + name: "any" + } + } + } + } +}; + +export const SqlServerTableDataset: coreHttp.CompositeMapper = { + serializedName: "SqlServerTable", + type: { + name: "Composite", + className: "SqlServerTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const RestResourceDataset: coreHttp.CompositeMapper = { + serializedName: "RestResource", + type: { + name: "Composite", + className: "RestResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + relativeUrl: { + serializedName: "typeProperties.relativeUrl", + type: { + name: "any" + } + }, + requestMethod: { + serializedName: "typeProperties.requestMethod", + type: { + name: "any" + } + }, + requestBody: { + serializedName: "typeProperties.requestBody", + type: { + name: "any" + } + }, + additionalHeaders: { + serializedName: "typeProperties.additionalHeaders", + type: { + name: "any" + } + }, + paginationRules: { + serializedName: "typeProperties.paginationRules", + type: { + name: "any" + } + } + } + } +}; + +export const SapTableResourceDataset: coreHttp.CompositeMapper = { + serializedName: "SapTableResource", + type: { + name: "Composite", + className: "SapTableResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + 
tableName: { + serializedName: "typeProperties.tableName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const WebTableDataset: coreHttp.CompositeMapper = { + serializedName: "WebTable", + type: { + name: "Composite", + className: "WebTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + index: { + serializedName: "typeProperties.index", + required: true, + type: { + name: "any" + } + }, + path: { + serializedName: "typeProperties.path", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSearchIndexDataset: coreHttp.CompositeMapper = { + serializedName: "AzureSearchIndex", + type: { + name: "Composite", + className: "AzureSearchIndexDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + indexName: { + serializedName: "typeProperties.indexName", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AmazonMWSObjectDataset: coreHttp.CompositeMapper = { + serializedName: "AmazonMWSObject", + type: { + name: "Composite", + className: "AmazonMWSObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const AzurePostgreSqlTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzurePostgreSqlTable", + type: { + name: "Composite", + className: "AzurePostgreSqlTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const ConcurObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ConcurObject", + type: { + name: "Composite", + className: "ConcurObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const CouchbaseTableDataset: coreHttp.CompositeMapper = { + serializedName: "CouchbaseTable", + type: { + name: "Composite", + className: "CouchbaseTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const DrillTableDataset: coreHttp.CompositeMapper = { + serializedName: "DrillTable", + type: { + name: "Composite", + className: "DrillTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const EloquaObjectDataset: coreHttp.CompositeMapper = { + serializedName: "EloquaObject", + type: { + name: "Composite", + className: "EloquaObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleBigQueryObjectDataset: coreHttp.CompositeMapper = { + serializedName: "GoogleBigQueryObject", + type: { + name: "Composite", + className: "GoogleBigQueryObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + dataset: { + serializedName: "typeProperties.dataset", + type: { + name: "any" + } + } + } + } +}; + +export const GreenplumTableDataset: coreHttp.CompositeMapper = { + serializedName: "GreenplumTable", + type: { + name: "Composite", + className: "GreenplumTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const HBaseObjectDataset: coreHttp.CompositeMapper = { + serializedName: "HBaseObject", + type: { + name: "Composite", + className: "HBaseObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const HiveObjectDataset: coreHttp.CompositeMapper = { + serializedName: "HiveObject", + type: { + name: "Composite", + className: "HiveObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const HubspotObjectDataset: coreHttp.CompositeMapper = { + serializedName: "HubspotObject", + type: { + name: "Composite", + className: "HubspotObjectDataset", + 
uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ImpalaObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ImpalaObject", + type: { + name: "Composite", + className: "ImpalaObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const JiraObjectDataset: coreHttp.CompositeMapper = { + serializedName: "JiraObject", + type: { + name: "Composite", + className: "JiraObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const MagentoObjectDataset: coreHttp.CompositeMapper = { + serializedName: "MagentoObject", + type: { + name: "Composite", + className: "MagentoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const MariaDBTableDataset: coreHttp.CompositeMapper = { + serializedName: "MariaDBTable", + type: { + name: "Composite", + className: "MariaDBTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMariaDBTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureMariaDBTable", + type: { + name: "Composite", + className: "AzureMariaDBTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const MarketoObjectDataset: coreHttp.CompositeMapper = { + serializedName: "MarketoObject", + type: { + name: "Composite", + className: "MarketoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const PaypalObjectDataset: coreHttp.CompositeMapper = { + serializedName: "PaypalObject", + type: { + name: "Composite", + className: "PaypalObjectDataset", + uberParent: "Dataset", + 
additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const PhoenixObjectDataset: coreHttp.CompositeMapper = { + serializedName: "PhoenixObject", + type: { + name: "Composite", + className: "PhoenixObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const PrestoObjectDataset: coreHttp.CompositeMapper = { + serializedName: "PrestoObject", + type: { + name: "Composite", + className: "PrestoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const QuickBooksObjectDataset: coreHttp.CompositeMapper = { + serializedName: "QuickBooksObject", + type: { + name: "Composite", + className: "QuickBooksObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ServiceNowObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ServiceNowObject", + type: { + name: "Composite", + className: "ServiceNowObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ShopifyObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ShopifyObject", + type: { + name: "Composite", + className: "ShopifyObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const SparkObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SparkObject", + type: { + name: "Composite", + className: "SparkObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + 
serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const SquareObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SquareObject", + type: { + name: "Composite", + className: "SquareObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const XeroObjectDataset: coreHttp.CompositeMapper = { + serializedName: "XeroObject", + type: { + name: "Composite", + className: "XeroObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ZohoObjectDataset: coreHttp.CompositeMapper = { + serializedName: "ZohoObject", + type: { + name: "Composite", + className: "ZohoObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const NetezzaTableDataset: coreHttp.CompositeMapper = { + serializedName: "NetezzaTable", + type: { + name: "Composite", + className: "NetezzaTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const VerticaTableDataset: coreHttp.CompositeMapper = { + serializedName: "VerticaTable", + type: { + name: "Composite", + className: "VerticaTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + }, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + }, + schemaTypePropertiesSchema: { + serializedName: "typeProperties.schema", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceMarketingCloudObjectDataset: coreHttp.CompositeMapper = { + serializedName: "SalesforceMarketingCloudObject", + type: { + name: "Composite", + className: "SalesforceMarketingCloudObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ResponsysObjectDataset: coreHttp.CompositeMapper = { + serializedName: 
"ResponsysObject", + type: { + name: "Composite", + className: "ResponsysObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsAXResourceDataset: coreHttp.CompositeMapper = { + serializedName: "DynamicsAXResource", + type: { + name: "Composite", + className: "DynamicsAXResourceDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + path: { + serializedName: "typeProperties.path", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const OracleServiceCloudObjectDataset: coreHttp.CompositeMapper = { + serializedName: "OracleServiceCloudObject", + type: { + name: "Composite", + className: "OracleServiceCloudObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataExplorerTableDataset: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorerTable", + type: { + name: "Composite", + className: "AzureDataExplorerTableDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + table: { + serializedName: "typeProperties.table", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleAdWordsObjectDataset: coreHttp.CompositeMapper = { + serializedName: "GoogleAdWordsObject", + type: { + name: "Composite", + className: "GoogleAdWordsObjectDataset", + uberParent: "Dataset", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator, + modelProperties: { + ...Dataset.type.modelProperties, + tableName: { + serializedName: "typeProperties.tableName", + type: { + name: "any" + } + } + } + } +}; + +export const ControlActivity: coreHttp.CompositeMapper = { + serializedName: "Container", + type: { + name: "Composite", + className: "ControlActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties + } + } +}; + +export const ExecutionActivity: coreHttp.CompositeMapper = { + serializedName: "Execution", + type: { + name: "Composite", + className: "ExecutionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + ...Activity.type.modelProperties, + linkedServiceName: { + serializedName: "linkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + policy: { + serializedName: "policy", + type: { + name: "Composite", + className: "ActivityPolicy" + } + } + } + } +}; + +export const ExecutePipelineActivity: coreHttp.CompositeMapper = { + serializedName: "ExecutePipeline", + type: { + name: "Composite", + 
className: "ExecutePipelineActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + pipeline: { + serializedName: "typeProperties.pipeline", + type: { + name: "Composite", + className: "PipelineReference" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + waitOnCompletion: { + serializedName: "typeProperties.waitOnCompletion", + type: { + name: "Boolean" + } + } + } + } +}; + +export const IfConditionActivity: coreHttp.CompositeMapper = { + serializedName: "IfCondition", + type: { + name: "Composite", + className: "IfConditionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + expression: { + serializedName: "typeProperties.expression", + type: { + name: "Composite", + className: "Expression" + } + }, + ifTrueActivities: { + serializedName: "typeProperties.ifTrueActivities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + }, + ifFalseActivities: { + serializedName: "typeProperties.ifFalseActivities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const SwitchActivity: coreHttp.CompositeMapper = { + serializedName: "Switch", + type: { + name: "Composite", + className: "SwitchActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + on: { + serializedName: "typeProperties.on", + type: { + name: "Composite", + className: "Expression" + } + }, + cases: { + serializedName: "typeProperties.cases", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SwitchCase" + } + } + } + }, + defaultActivities: { + serializedName: "typeProperties.defaultActivities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const ForEachActivity: coreHttp.CompositeMapper = { + serializedName: "ForEach", + type: { + name: "Composite", + className: "ForEachActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + isSequential: { + serializedName: "typeProperties.isSequential", + type: { + name: "Boolean" + } + }, + batchCount: { + constraints: { + InclusiveMaximum: 50 + }, + serializedName: "typeProperties.batchCount", + type: { + name: "Number" + } + }, + items: { + serializedName: "typeProperties.items", + type: { + name: "Composite", + className: "Expression" + } + }, + activities: { + serializedName: "typeProperties.activities", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const WaitActivity: coreHttp.CompositeMapper = { + serializedName: "Wait", + type: { + name: "Composite", + className: "WaitActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + waitTimeInSeconds: { + serializedName: "typeProperties.waitTimeInSeconds", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const UntilActivity: coreHttp.CompositeMapper = { + serializedName: "Until", + type: { + name: "Composite", + className: "UntilActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + expression: { + serializedName: "typeProperties.expression", + type: { + name: "Composite", + className: "Expression" + } + }, + timeout: { + serializedName: "typeProperties.timeout", + type: { + name: "any" + } + }, + activities: { + serializedName: "typeProperties.activities", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + } + } + } +}; + +export const ValidationActivity: coreHttp.CompositeMapper = { + serializedName: "Validation", + type: { + name: "Composite", + className: "ValidationActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + timeout: { + serializedName: "typeProperties.timeout", + type: { + name: "any" + } + }, + sleep: { + serializedName: "typeProperties.sleep", + type: { + name: "any" + } + }, + minimumSize: { + serializedName: "typeProperties.minimumSize", + type: { + name: "any" + } + }, + childItems: { + serializedName: "typeProperties.childItems", + type: { + name: "any" + } + }, + dataset: { + serializedName: "typeProperties.dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } +}; + +export const FilterActivity: coreHttp.CompositeMapper = { + serializedName: "Filter", + type: { + name: "Composite", + className: "FilterActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + items: { + serializedName: "typeProperties.items", + type: { + name: "Composite", + className: "Expression" + } + }, + condition: { + serializedName: "typeProperties.condition", + type: { + name: "Composite", + className: "Expression" + } + } + } + } +}; + +export const SetVariableActivity: coreHttp.CompositeMapper = { + serializedName: "SetVariable", + type: { + name: "Composite", + className: "SetVariableActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + variableName: { + serializedName: "typeProperties.variableName", + type: { + name: "String" + } + }, + value: { + serializedName: "typeProperties.value", + type: { + name: "any" + } + } + } + } +}; + +export const AppendVariableActivity: coreHttp.CompositeMapper = { + serializedName: "AppendVariable", + type: { + name: "Composite", + className: "AppendVariableActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + variableName: { + serializedName: "typeProperties.variableName", + 
type: { + name: "String" + } + }, + value: { + serializedName: "typeProperties.value", + type: { + name: "any" + } + } + } + } +}; + +export const WebHookActivity: coreHttp.CompositeMapper = { + serializedName: "WebHook", + type: { + name: "Composite", + className: "WebHookActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + method: { + serializedName: "typeProperties.method", + required: true, + type: { + name: "String" + } + }, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + timeout: { + serializedName: "typeProperties.timeout", + type: { + name: "String" + } + }, + headers: { + serializedName: "typeProperties.headers", + type: { + name: "any" + } + }, + body: { + serializedName: "typeProperties.body", + type: { + name: "any" + } + }, + authentication: { + serializedName: "typeProperties.authentication", + type: { + name: "Composite", + className: "WebActivityAuthentication" + } + }, + reportStatusOnCallBack: { + serializedName: "typeProperties.reportStatusOnCallBack", + type: { + name: "any" + } + } + } + } +}; + +export const SqlPoolStoredProcedureActivity: coreHttp.CompositeMapper = { + serializedName: "SqlPoolStoredProcedure", + type: { + name: "Composite", + className: "SqlPoolStoredProcedureActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...Activity.type.modelProperties, + sqlPool: { + serializedName: "sqlPool", + type: { + name: "Composite", + className: "SqlPoolReference" + } + }, + storedProcedureName: { + serializedName: "typeProperties.storedProcedureName", + required: true, + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "typeProperties.storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + } + } + } +}; + +export const RerunTumblingWindowTrigger: coreHttp.CompositeMapper = { + serializedName: "RerunTumblingWindowTrigger", + type: { + name: "Composite", + className: "RerunTumblingWindowTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...Trigger.type.modelProperties, + parentTrigger: { + serializedName: "typeProperties.parentTrigger", + type: { + name: "any" + } + }, + requestedStartTime: { + serializedName: "typeProperties.requestedStartTime", + required: true, + type: { + name: "DateTime" + } + }, + requestedEndTime: { + serializedName: "typeProperties.requestedEndTime", + required: true, + type: { + name: "DateTime" + } + }, + maxConcurrency: { + constraints: { + InclusiveMaximum: 50, + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.maxConcurrency", + required: true, + type: { + name: "Number" + } + } + } + } +}; + +export const MultiplePipelineTrigger: coreHttp.CompositeMapper = { + serializedName: "MultiplePipelineTrigger", + type: { + name: "Composite", + className: "MultiplePipelineTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + ...Trigger.type.modelProperties, + pipelines: { + serializedName: "pipelines", + type: 
{ + name: "Sequence", + element: { + type: { + name: "Composite", + className: "TriggerPipelineReference" + } + } + } + } + } + } +}; + +export const TumblingWindowTrigger: coreHttp.CompositeMapper = { + serializedName: "TumblingWindowTrigger", + type: { + name: "Composite", + className: "TumblingWindowTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...Trigger.type.modelProperties, + pipeline: { + serializedName: "pipeline", + type: { + name: "Composite", + className: "TriggerPipelineReference" + } + }, + frequency: { + serializedName: "typeProperties.frequency", + required: true, + type: { + name: "String" + } + }, + interval: { + serializedName: "typeProperties.interval", + required: true, + type: { + name: "Number" + } + }, + startTime: { + serializedName: "typeProperties.startTime", + required: true, + type: { + name: "DateTime" + } + }, + endTime: { + serializedName: "typeProperties.endTime", + type: { + name: "DateTime" + } + }, + delay: { + serializedName: "typeProperties.delay", + type: { + name: "any" + } + }, + maxConcurrency: { + constraints: { + InclusiveMaximum: 50, + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.maxConcurrency", + required: true, + type: { + name: "Number" + } + }, + retryPolicy: { + serializedName: "typeProperties.retryPolicy", + type: { + name: "Composite", + className: "RetryPolicy" + } + }, + dependsOn: { + serializedName: "typeProperties.dependsOn", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DependencyReference" + } + } + } + } + } + } +}; + +export const ChainingTrigger: coreHttp.CompositeMapper = { + serializedName: "ChainingTrigger", + type: { + name: "Composite", + className: "ChainingTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...Trigger.type.modelProperties, + pipeline: { + serializedName: "pipeline", + type: { + name: "Composite", + className: "TriggerPipelineReference" + } + }, + dependsOn: { + serializedName: "typeProperties.dependsOn", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PipelineReference" + } + } + } + }, + runDimension: { + serializedName: "typeProperties.runDimension", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const MappingDataFlow: coreHttp.CompositeMapper = { + serializedName: "MappingDataFlow", + type: { + name: "Composite", + className: "MappingDataFlow", + uberParent: "DataFlow", + polymorphicDiscriminator: DataFlow.type.polymorphicDiscriminator, + modelProperties: { + ...DataFlow.type.modelProperties, + sources: { + serializedName: "typeProperties.sources", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowSource" + } + } + } + }, + sinks: { + serializedName: "typeProperties.sinks", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DataFlowSink" + } + } + } + }, + transformations: { + serializedName: "typeProperties.transformations", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Transformation" + } + } + } + }, + script: { + serializedName: "typeProperties.script", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugResource: coreHttp.CompositeMapper = { + type: { + 
name: "Composite", + className: "DataFlowDebugResource", + modelProperties: { + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "DataFlow" + } + } + } + } +}; + +export const DatasetDebugResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetDebugResource", + modelProperties: { + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "Dataset" + } + } + } + } +}; + +export const LinkedServiceDebugResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceDebugResource", + modelProperties: { + ...SubResourceDebugResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "LinkedService" + } + } + } + } +}; + +export const ManagedIntegrationRuntime: coreHttp.CompositeMapper = { + serializedName: "Managed", + type: { + name: "Composite", + className: "ManagedIntegrationRuntime", + uberParent: "IntegrationRuntime", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: IntegrationRuntime.type.polymorphicDiscriminator, + modelProperties: { + ...IntegrationRuntime.type.modelProperties, + state: { + serializedName: "state", + readOnly: true, + type: { + name: "String" + } + }, + computeProperties: { + serializedName: "typeProperties.computeProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeComputeProperties" + } + }, + ssisProperties: { + serializedName: "typeProperties.ssisProperties", + type: { + name: "Composite", + className: "IntegrationRuntimeSsisProperties" + } + } + } + } +}; + +export const SelfHostedIntegrationRuntime: coreHttp.CompositeMapper = { + serializedName: "SelfHosted", + type: { + name: "Composite", + className: "SelfHostedIntegrationRuntime", + uberParent: "IntegrationRuntime", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: IntegrationRuntime.type.polymorphicDiscriminator, + modelProperties: { + ...IntegrationRuntime.type.modelProperties, + linkedInfo: { + serializedName: "typeProperties.linkedInfo", + type: { + name: "Composite", + className: "LinkedIntegrationRuntimeType" + } + } + } + } +}; + +export const SecureString: coreHttp.CompositeMapper = { + serializedName: "SecureString", + type: { + name: "Composite", + className: "SecureString", + uberParent: "SecretBase", + polymorphicDiscriminator: SecretBase.type.polymorphicDiscriminator, + modelProperties: { + ...SecretBase.type.modelProperties, + value: { + serializedName: "value", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const AzureKeyVaultSecretReference: coreHttp.CompositeMapper = { + serializedName: "AzureKeyVaultSecret", + type: { + name: "Composite", + className: "AzureKeyVaultSecretReference", + uberParent: "SecretBase", + polymorphicDiscriminator: SecretBase.type.polymorphicDiscriminator, + modelProperties: { + ...SecretBase.type.modelProperties, + store: { + serializedName: "store", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + secretName: { + serializedName: "secretName", + required: true, + type: { + name: "any" + } + }, + secretVersion: { + serializedName: "secretVersion", + type: { + name: "any" + } + } + } + } +}; + +export const DataFlowSource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowSource", + 
modelProperties: { + ...Transformation.type.modelProperties, + dataset: { + serializedName: "dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } +}; + +export const DataFlowSink: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowSink", + modelProperties: { + ...Transformation.type.modelProperties, + dataset: { + serializedName: "dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } +}; + +export const AzureBlobStorageLocation: coreHttp.CompositeMapper = { + serializedName: "AzureBlobStorageLocation", + type: { + name: "Composite", + className: "AzureBlobStorageLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + container: { + serializedName: "container", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSLocation: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSLocation", + type: { + name: "Composite", + className: "AzureBlobFSLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + fileSystem: { + serializedName: "fileSystem", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreLocation: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreLocation", + type: { + name: "Composite", + className: "AzureDataLakeStoreLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const AmazonS3Location: coreHttp.CompositeMapper = { + serializedName: "AmazonS3Location", + type: { + name: "Composite", + className: "AmazonS3Location", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + bucketName: { + serializedName: "bucketName", + type: { + name: "any" + } + }, + version: { + serializedName: "version", + type: { + name: "any" + } + } + } + } +}; + +export const FileServerLocation: coreHttp.CompositeMapper = { + serializedName: "FileServerLocation", + type: { + name: "Composite", + className: "FileServerLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const AzureFileStorageLocation: coreHttp.CompositeMapper = { + serializedName: "AzureFileStorageLocation", + type: { + name: "Composite", + className: "AzureFileStorageLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const GoogleCloudStorageLocation: coreHttp.CompositeMapper = { + serializedName: "GoogleCloudStorageLocation", + type: { + name: "Composite", + className: "GoogleCloudStorageLocation", + uberParent: "DatasetLocation", + 
additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + bucketName: { + serializedName: "bucketName", + type: { + name: "any" + } + }, + version: { + serializedName: "version", + type: { + name: "any" + } + } + } + } +}; + +export const FtpServerLocation: coreHttp.CompositeMapper = { + serializedName: "FtpServerLocation", + type: { + name: "Composite", + className: "FtpServerLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const SftpLocation: coreHttp.CompositeMapper = { + serializedName: "SftpLocation", + type: { + name: "Composite", + className: "SftpLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const HttpServerLocation: coreHttp.CompositeMapper = { + serializedName: "HttpServerLocation", + type: { + name: "Composite", + className: "HttpServerLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties, + relativeUrl: { + serializedName: "relativeUrl", + type: { + name: "any" + } + } + } + } +}; + +export const HdfsLocation: coreHttp.CompositeMapper = { + serializedName: "HdfsLocation", + type: { + name: "Composite", + className: "HdfsLocation", + uberParent: "DatasetLocation", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetLocation.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetLocation.type.modelProperties + } + } +}; + +export const TextFormat: coreHttp.CompositeMapper = { + serializedName: "TextFormat", + type: { + name: "Composite", + className: "TextFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetStorageFormat.type.modelProperties, + columnDelimiter: { + serializedName: "columnDelimiter", + type: { + name: "any" + } + }, + rowDelimiter: { + serializedName: "rowDelimiter", + type: { + name: "any" + } + }, + escapeChar: { + serializedName: "escapeChar", + type: { + name: "any" + } + }, + quoteChar: { + serializedName: "quoteChar", + type: { + name: "any" + } + }, + nullValue: { + serializedName: "nullValue", + type: { + name: "any" + } + }, + encodingName: { + serializedName: "encodingName", + type: { + name: "any" + } + }, + treatEmptyAsNull: { + serializedName: "treatEmptyAsNull", + type: { + name: "any" + } + }, + skipLineCount: { + serializedName: "skipLineCount", + type: { + name: "any" + } + }, + firstRowAsHeader: { + serializedName: "firstRowAsHeader", + type: { + name: "any" + } + } + } + } +}; + +export const JsonFormat: coreHttp.CompositeMapper = { + serializedName: "JsonFormat", + type: { + name: "Composite", + className: "JsonFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + 
...DatasetStorageFormat.type.modelProperties, + filePattern: { + serializedName: "filePattern", + type: { + name: "String" + } + }, + nestingSeparator: { + serializedName: "nestingSeparator", + type: { + name: "any" + } + }, + encodingName: { + serializedName: "encodingName", + type: { + name: "any" + } + }, + jsonNodeReference: { + serializedName: "jsonNodeReference", + type: { + name: "any" + } + }, + jsonPathDefinition: { + serializedName: "jsonPathDefinition", + type: { + name: "any" + } + } + } + } +}; + +export const AvroFormat: coreHttp.CompositeMapper = { + serializedName: "AvroFormat", + type: { + name: "Composite", + className: "AvroFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetStorageFormat.type.modelProperties + } + } +}; + +export const OrcFormat: coreHttp.CompositeMapper = { + serializedName: "OrcFormat", + type: { + name: "Composite", + className: "OrcFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetStorageFormat.type.modelProperties + } + } +}; + +export const ParquetFormat: coreHttp.CompositeMapper = { + serializedName: "ParquetFormat", + type: { + name: "Composite", + className: "ParquetFormat", + uberParent: "DatasetStorageFormat", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetStorageFormat.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetStorageFormat.type.modelProperties + } + } +}; + +export const DatasetBZip2Compression: coreHttp.CompositeMapper = { + serializedName: "BZip2", + type: { + name: "Composite", + className: "DatasetBZip2Compression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties + } + } +}; + +export const DatasetGZipCompression: coreHttp.CompositeMapper = { + serializedName: "GZip", + type: { + name: "Composite", + className: "DatasetGZipCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties, + level: { + serializedName: "level", + type: { + name: "String" + } + } + } + } +}; + +export const DatasetDeflateCompression: coreHttp.CompositeMapper = { + serializedName: "Deflate", + type: { + name: "Composite", + className: "DatasetDeflateCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties, + level: { + serializedName: "level", + type: { + name: "String" + } + } + } + } +}; + +export const DatasetZipDeflateCompression: coreHttp.CompositeMapper = { + serializedName: "ZipDeflate", + type: { + name: "Composite", + className: "DatasetZipDeflateCompression", + uberParent: "DatasetCompression", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: DatasetCompression.type.polymorphicDiscriminator, + modelProperties: { + ...DatasetCompression.type.modelProperties, + level: { + 
serializedName: "level", + type: { + name: "String" + } + } + } + } +}; + +export const WebAnonymousAuthentication: coreHttp.CompositeMapper = { + serializedName: "Anonymous", + type: { + name: "Composite", + className: "WebAnonymousAuthentication", + uberParent: "WebLinkedServiceTypeProperties", + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + modelProperties: { + ...WebLinkedServiceTypeProperties.type.modelProperties + } + } +}; + +export const WebBasicAuthentication: coreHttp.CompositeMapper = { + serializedName: "Basic", + type: { + name: "Composite", + className: "WebBasicAuthentication", + uberParent: "WebLinkedServiceTypeProperties", + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + modelProperties: { + ...WebLinkedServiceTypeProperties.type.modelProperties, + username: { + serializedName: "username", + required: true, + type: { + name: "any" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecretBase" + } + } + } + } +}; + +export const WebClientCertificateAuthentication: coreHttp.CompositeMapper = { + serializedName: "ClientCertificate", + type: { + name: "Composite", + className: "WebClientCertificateAuthentication", + uberParent: "WebLinkedServiceTypeProperties", + polymorphicDiscriminator: WebLinkedServiceTypeProperties.type.polymorphicDiscriminator, + modelProperties: { + ...WebLinkedServiceTypeProperties.type.modelProperties, + pfx: { + serializedName: "pfx", + type: { + name: "Composite", + className: "SecretBase" + } + }, + password: { + serializedName: "password", + type: { + name: "Composite", + className: "SecretBase" + } + } + } + } +}; + +export const AzureBlobStorageReadSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobStorageReadSettings", + type: { + name: "Composite", + className: "AzureBlobStorageReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + prefix: { + serializedName: "prefix", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSReadSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSReadSettings", + type: { + name: "Composite", + className: "AzureBlobFSReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: 
"enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreReadSettings: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreReadSettings", + type: { + name: "Composite", + className: "AzureDataLakeStoreReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonS3ReadSettings: coreHttp.CompositeMapper = { + serializedName: "AmazonS3ReadSettings", + type: { + name: "Composite", + className: "AmazonS3ReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + prefix: { + serializedName: "prefix", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const FileServerReadSettings: coreHttp.CompositeMapper = { + serializedName: "FileServerReadSettings", + type: { + name: "Composite", + className: "FileServerReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const AzureFileStorageReadSettings: coreHttp.CompositeMapper = { + serializedName: "AzureFileStorageReadSettings", + type: { + name: "Composite", 
+ className: "AzureFileStorageReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleCloudStorageReadSettings: coreHttp.CompositeMapper = { + serializedName: "GoogleCloudStorageReadSettings", + type: { + name: "Composite", + className: "GoogleCloudStorageReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + prefix: { + serializedName: "prefix", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const FtpReadSettings: coreHttp.CompositeMapper = { + serializedName: "FtpReadSettings", + type: { + name: "Composite", + className: "FtpReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + useBinaryTransfer: { + serializedName: "useBinaryTransfer", + type: { + name: "Boolean" + } + } + } + } +}; + +export const SftpReadSettings: coreHttp.CompositeMapper = { + serializedName: "SftpReadSettings", + type: { + name: "Composite", + className: "SftpReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + 
serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + } + } + } +}; + +export const HttpReadSettings: coreHttp.CompositeMapper = { + serializedName: "HttpReadSettings", + type: { + name: "Composite", + className: "HttpReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + requestMethod: { + serializedName: "requestMethod", + type: { + name: "any" + } + }, + requestBody: { + serializedName: "requestBody", + type: { + name: "any" + } + }, + additionalHeaders: { + serializedName: "additionalHeaders", + type: { + name: "any" + } + }, + requestTimeout: { + serializedName: "requestTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const HdfsReadSettings: coreHttp.CompositeMapper = { + serializedName: "HdfsReadSettings", + type: { + name: "Composite", + className: "HdfsReadSettings", + uberParent: "StoreReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreReadSettings.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + wildcardFolderPath: { + serializedName: "wildcardFolderPath", + type: { + name: "any" + } + }, + wildcardFileName: { + serializedName: "wildcardFileName", + type: { + name: "any" + } + }, + enablePartitionDiscovery: { + serializedName: "enablePartitionDiscovery", + type: { + name: "Boolean" + } + }, + modifiedDatetimeStart: { + serializedName: "modifiedDatetimeStart", + type: { + name: "any" + } + }, + modifiedDatetimeEnd: { + serializedName: "modifiedDatetimeEnd", + type: { + name: "any" + } + }, + distcpSettings: { + serializedName: "distcpSettings", + type: { + name: "Composite", + className: "DistcpSettings" + } + } + } + } +}; + +export const SftpWriteSettings: coreHttp.CompositeMapper = { + serializedName: "SftpWriteSettings", + type: { + name: "Composite", + className: "SftpWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + operationTimeout: { + serializedName: "operationTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobStorageWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobStorageWriteSettings", + type: { + name: "Composite", + className: "AzureBlobStorageWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + blockSizeInMB: { + serializedName: "blockSizeInMB", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSWriteSettings", + type: { + name: "Composite", + className: "AzureBlobFSWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties, + blockSizeInMB: { + serializedName: "blockSizeInMB", + type: { + name: "any" + } + } + } + } +}; + +export const 
AzureDataLakeStoreWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreWriteSettings", + type: { + name: "Composite", + className: "AzureDataLakeStoreWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties + } + } +}; + +export const FileServerWriteSettings: coreHttp.CompositeMapper = { + serializedName: "FileServerWriteSettings", + type: { + name: "Composite", + className: "FileServerWriteSettings", + uberParent: "StoreWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: StoreWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...StoreWriteSettings.type.modelProperties + } + } +}; + +export const DelimitedTextReadSettings: coreHttp.CompositeMapper = { + serializedName: "DelimitedTextReadSettings", + type: { + name: "Composite", + className: "DelimitedTextReadSettings", + uberParent: "FormatReadSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatReadSettings.type.modelProperties, + skipLineCount: { + serializedName: "skipLineCount", + type: { + name: "any" + } + } + } + } +}; + +export const AvroWriteSettings: coreHttp.CompositeMapper = { + serializedName: "AvroWriteSettings", + type: { + name: "Composite", + className: "AvroWriteSettings", + uberParent: "FormatWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatWriteSettings.type.modelProperties, + recordName: { + serializedName: "recordName", + type: { + name: "String" + } + }, + recordNamespace: { + serializedName: "recordNamespace", + type: { + name: "String" + } + } + } + } +}; + +export const DelimitedTextWriteSettings: coreHttp.CompositeMapper = { + serializedName: "DelimitedTextWriteSettings", + type: { + name: "Composite", + className: "DelimitedTextWriteSettings", + uberParent: "FormatWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatWriteSettings.type.modelProperties, + quoteAllText: { + serializedName: "quoteAllText", + type: { + name: "any" + } + }, + fileExtension: { + serializedName: "fileExtension", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const JsonWriteSettings: coreHttp.CompositeMapper = { + serializedName: "JsonWriteSettings", + type: { + name: "Composite", + className: "JsonWriteSettings", + uberParent: "FormatWriteSettings", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: FormatWriteSettings.type.polymorphicDiscriminator, + modelProperties: { + ...FormatWriteSettings.type.modelProperties, + filePattern: { + serializedName: "filePattern", + type: { + name: "String" + } + } + } + } +}; + +export const AvroSource: coreHttp.CompositeMapper = { + serializedName: "AvroSource", + type: { + name: "Composite", + className: "AvroSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + 
name: "Composite", + className: "StoreReadSettings" + } + } + } + } +}; + +export const ParquetSource: coreHttp.CompositeMapper = { + serializedName: "ParquetSource", + type: { + name: "Composite", + className: "ParquetSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + } + } + } +}; + +export const DelimitedTextSource: coreHttp.CompositeMapper = { + serializedName: "DelimitedTextSource", + type: { + name: "Composite", + className: "DelimitedTextSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "DelimitedTextReadSettings" + } + } + } + } +}; + +export const JsonSource: coreHttp.CompositeMapper = { + serializedName: "JsonSource", + type: { + name: "Composite", + className: "JsonSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + } + } + } +}; + +export const OrcSource: coreHttp.CompositeMapper = { + serializedName: "OrcSource", + type: { + name: "Composite", + className: "OrcSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + } + } + } +}; + +export const BinarySource: coreHttp.CompositeMapper = { + serializedName: "BinarySource", + type: { + name: "Composite", + className: "BinarySource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreReadSettings" + } + } + } + } +}; + +export const TabularSource: coreHttp.CompositeMapper = { + serializedName: "TabularSource", + type: { + name: "Composite", + className: "TabularSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + ...CopySource.type.modelProperties, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const BlobSource: coreHttp.CompositeMapper = { + serializedName: "BlobSource", + type: { + name: "Composite", + className: "BlobSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + 
...CopySource.type.modelProperties, + treatEmptyAsNull: { + serializedName: "treatEmptyAsNull", + type: { + name: "any" + } + }, + skipHeaderLineCount: { + serializedName: "skipHeaderLineCount", + type: { + name: "any" + } + }, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + } + } + } +}; + +export const DocumentDbCollectionSource: coreHttp.CompositeMapper = { + serializedName: "DocumentDbCollectionSource", + type: { + name: "Composite", + className: "DocumentDbCollectionSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + nestingSeparator: { + serializedName: "nestingSeparator", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbSqlApiSource: coreHttp.CompositeMapper = { + serializedName: "CosmosDbSqlApiSource", + type: { + name: "Composite", + className: "CosmosDbSqlApiSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + pageSize: { + serializedName: "pageSize", + type: { + name: "any" + } + }, + preferredRegions: { + serializedName: "preferredRegions", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsSource: coreHttp.CompositeMapper = { + serializedName: "DynamicsSource", + type: { + name: "Composite", + className: "DynamicsSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsCrmSource: coreHttp.CompositeMapper = { + serializedName: "DynamicsCrmSource", + type: { + name: "Composite", + className: "DynamicsCrmSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const CommonDataServiceForAppsSource: coreHttp.CompositeMapper = { + serializedName: "CommonDataServiceForAppsSource", + type: { + name: "Composite", + className: "CommonDataServiceForAppsSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const RelationalSource: coreHttp.CompositeMapper = { + serializedName: "RelationalSource", + type: { + name: "Composite", + className: "RelationalSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MicrosoftAccessSource: 
coreHttp.CompositeMapper = { + serializedName: "MicrosoftAccessSource", + type: { + name: "Composite", + className: "MicrosoftAccessSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ODataSource: coreHttp.CompositeMapper = { + serializedName: "ODataSource", + type: { + name: "Composite", + className: "ODataSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceServiceCloudSource: coreHttp.CompositeMapper = { + serializedName: "SalesforceServiceCloudSource", + type: { + name: "Composite", + className: "SalesforceServiceCloudSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + readBehavior: { + serializedName: "readBehavior", + type: { + name: "String" + } + } + } + } +}; + +export const RestSource: coreHttp.CompositeMapper = { + serializedName: "RestSource", + type: { + name: "Composite", + className: "RestSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + requestMethod: { + serializedName: "requestMethod", + type: { + name: "any" + } + }, + requestBody: { + serializedName: "requestBody", + type: { + name: "any" + } + }, + additionalHeaders: { + serializedName: "additionalHeaders", + type: { + name: "any" + } + }, + paginationRules: { + serializedName: "paginationRules", + type: { + name: "any" + } + }, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } + }, + requestInterval: { + serializedName: "requestInterval", + type: { + name: "any" + } + } + } + } +}; + +export const FileSystemSource: coreHttp.CompositeMapper = { + serializedName: "FileSystemSource", + type: { + name: "Composite", + className: "FileSystemSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + } + } + } +}; + +export const HdfsSource: coreHttp.CompositeMapper = { + serializedName: "HdfsSource", + type: { + name: "Composite", + className: "HdfsSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + }, + distcpSettings: { + serializedName: "distcpSettings", + type: { + name: "Composite", + className: "DistcpSettings" + } + } + } + } +}; + +export const AzureDataExplorerSource: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorerSource", + type: { + name: 
"Composite", + className: "AzureDataExplorerSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + required: true, + type: { + name: "any" + } + }, + noTruncation: { + serializedName: "noTruncation", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const OracleSource: coreHttp.CompositeMapper = { + serializedName: "OracleSource", + type: { + name: "Composite", + className: "OracleSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + oracleReaderQuery: { + serializedName: "oracleReaderQuery", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "OraclePartitionSettings" + } + } + } + } +}; + +export const WebSource: coreHttp.CompositeMapper = { + serializedName: "WebSource", + type: { + name: "Composite", + className: "WebSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties + } + } +}; + +export const MongoDbSource: coreHttp.CompositeMapper = { + serializedName: "MongoDbSource", + type: { + name: "Composite", + className: "MongoDbSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MongoDbV2Source: coreHttp.CompositeMapper = { + serializedName: "MongoDbV2Source", + type: { + name: "Composite", + className: "MongoDbV2Source", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + filter: { + serializedName: "filter", + type: { + name: "any" + } + }, + cursorMethods: { + serializedName: "cursorMethods", + type: { + name: "Composite", + className: "MongoDbCursorMethodsProperties" + } + }, + batchSize: { + serializedName: "batchSize", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiSource: coreHttp.CompositeMapper = { + serializedName: "CosmosDbMongoDbApiSource", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + filter: { + serializedName: "filter", + type: { + name: "any" + } + }, + cursorMethods: { + serializedName: "cursorMethods", + type: { + name: "Composite", + className: "MongoDbCursorMethodsProperties" + } + }, + batchSize: { + serializedName: 
"batchSize", + type: { + name: "any" + } + }, + queryTimeout: { + serializedName: "queryTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const Office365Source: coreHttp.CompositeMapper = { + serializedName: "Office365Source", + type: { + name: "Composite", + className: "Office365Source", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + allowedGroups: { + serializedName: "allowedGroups", + type: { + name: "any" + } + }, + userScopeFilterUri: { + serializedName: "userScopeFilterUri", + type: { + name: "any" + } + }, + dateFilterColumn: { + serializedName: "dateFilterColumn", + type: { + name: "any" + } + }, + startTime: { + serializedName: "startTime", + type: { + name: "any" + } + }, + endTime: { + serializedName: "endTime", + type: { + name: "any" + } + }, + outputColumns: { + serializedName: "outputColumns", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreSource: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreSource", + type: { + name: "Composite", + className: "AzureDataLakeStoreSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSSource: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSSource", + type: { + name: "Composite", + className: "AzureBlobFSSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + treatEmptyAsNull: { + serializedName: "treatEmptyAsNull", + type: { + name: "any" + } + }, + skipHeaderLineCount: { + serializedName: "skipHeaderLineCount", + type: { + name: "any" + } + }, + recursive: { + serializedName: "recursive", + type: { + name: "any" + } + } + } + } +}; + +export const HttpSource: coreHttp.CompositeMapper = { + serializedName: "HttpSource", + type: { + name: "Composite", + className: "HttpSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...CopySource.type.modelProperties, + httpRequestTimeout: { + serializedName: "httpRequestTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const DelimitedTextSink: coreHttp.CompositeMapper = { + serializedName: "DelimitedTextSink", + type: { + name: "Composite", + className: "DelimitedTextSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "DelimitedTextWriteSettings" + } + } + } + } +}; + +export const JsonSink: coreHttp.CompositeMapper = { + serializedName: "JsonSink", + type: { + name: "Composite", + className: "JsonSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "JsonWriteSettings" + } + } + } + } +}; + +export const OrcSink: coreHttp.CompositeMapper = { + serializedName: "OrcSink", + type: { + name: "Composite", + className: "OrcSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + } + } + } +}; + +export const AzurePostgreSqlSink: coreHttp.CompositeMapper = { + serializedName: "AzurePostgreSqlSink", + type: { + name: "Composite", + className: "AzurePostgreSqlSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMySqlSink: coreHttp.CompositeMapper = { + serializedName: "AzureMySqlSink", + type: { + name: "Composite", + className: "AzureMySqlSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const SapCloudForCustomerSink: coreHttp.CompositeMapper = { + serializedName: "SapCloudForCustomerSink", + type: { + name: "Composite", + className: "SapCloudForCustomerSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + } + } + } +}; + +export const AzureQueueSink: coreHttp.CompositeMapper = { + serializedName: "AzureQueueSink", + type: { + name: "Composite", + className: "AzureQueueSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties + } + } +}; + +export const AzureTableSink: coreHttp.CompositeMapper = { + serializedName: "AzureTableSink", + type: { + name: "Composite", + className: "AzureTableSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + azureTableDefaultPartitionKeyValue: { + serializedName: "azureTableDefaultPartitionKeyValue", + type: { + name: "any" + } + }, + azureTablePartitionKeyName: { + serializedName: "azureTablePartitionKeyName", + type: { + name: "any" + } + }, + azureTableRowKeyName: { + serializedName: "azureTableRowKeyName", + type: { + name: "any" + } + }, + azureTableInsertType: { + serializedName: "azureTableInsertType", + type: { + name: "any" + } + } + } + } +}; + +export const AvroSink: coreHttp.CompositeMapper = { + 
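+  // File-format sinks mirror the sources above: each pairs a polymorphic
+  // storeSettings (StoreWriteSettings) with a format-specific formatSettings
+  // mapper (AvroWriteSettings here; DelimitedTextWriteSettings and
+  // JsonWriteSettings for their respective sinks).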
serializedName: "AvroSink", + type: { + name: "Composite", + className: "AvroSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + }, + formatSettings: { + serializedName: "formatSettings", + type: { + name: "Composite", + className: "AvroWriteSettings" + } + } + } + } +}; + +export const ParquetSink: coreHttp.CompositeMapper = { + serializedName: "ParquetSink", + type: { + name: "Composite", + className: "ParquetSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + } + } + } +}; + +export const BinarySink: coreHttp.CompositeMapper = { + serializedName: "BinarySink", + type: { + name: "Composite", + className: "BinarySink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + storeSettings: { + serializedName: "storeSettings", + type: { + name: "Composite", + className: "StoreWriteSettings" + } + } + } + } +}; + +export const BlobSink: coreHttp.CompositeMapper = { + serializedName: "BlobSink", + type: { + name: "Composite", + className: "BlobSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + blobWriterOverwriteFiles: { + serializedName: "blobWriterOverwriteFiles", + type: { + name: "any" + } + }, + blobWriterDateTimeFormat: { + serializedName: "blobWriterDateTimeFormat", + type: { + name: "any" + } + }, + blobWriterAddHeader: { + serializedName: "blobWriterAddHeader", + type: { + name: "any" + } + }, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const FileSystemSink: coreHttp.CompositeMapper = { + serializedName: "FileSystemSink", + type: { + name: "Composite", + className: "FileSystemSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const DocumentDbCollectionSink: coreHttp.CompositeMapper = { + serializedName: "DocumentDbCollectionSink", + type: { + name: "Composite", + className: "DocumentDbCollectionSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + nestingSeparator: { + serializedName: "nestingSeparator", + type: { + name: "any" + } + }, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbSqlApiSink: coreHttp.CompositeMapper = { + serializedName: "CosmosDbSqlApiSink", + type: { + name: "Composite", + className: "CosmosDbSqlApiSink", + uberParent: "CopySink", + 
additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const SqlSink: coreHttp.CompositeMapper = { + serializedName: "SqlSink", + type: { + name: "Composite", + className: "SqlSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + sqlWriterStoredProcedureName: { + serializedName: "sqlWriterStoredProcedureName", + type: { + name: "any" + } + }, + sqlWriterTableType: { + serializedName: "sqlWriterTableType", + type: { + name: "any" + } + }, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + storedProcedureTableTypeParameterName: { + serializedName: "storedProcedureTableTypeParameterName", + type: { + name: "any" + } + }, + tableOption: { + serializedName: "tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const SqlServerSink: coreHttp.CompositeMapper = { + serializedName: "SqlServerSink", + type: { + name: "Composite", + className: "SqlServerSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + sqlWriterStoredProcedureName: { + serializedName: "sqlWriterStoredProcedureName", + type: { + name: "any" + } + }, + sqlWriterTableType: { + serializedName: "sqlWriterTableType", + type: { + name: "any" + } + }, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + storedProcedureTableTypeParameterName: { + serializedName: "storedProcedureTableTypeParameterName", + type: { + name: "any" + } + }, + tableOption: { + serializedName: "tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlSink: coreHttp.CompositeMapper = { + serializedName: "AzureSqlSink", + type: { + name: "Composite", + className: "AzureSqlSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + sqlWriterStoredProcedureName: { + serializedName: "sqlWriterStoredProcedureName", + type: { + name: "any" + } + }, + sqlWriterTableType: { + serializedName: "sqlWriterTableType", + type: { + name: "any" + } + }, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + storedProcedureTableTypeParameterName: { + serializedName: "storedProcedureTableTypeParameterName", + type: { + name: "any" + } + }, + tableOption: { + serializedName: "tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const 
SqlMISink: coreHttp.CompositeMapper = { + serializedName: "SqlMISink", + type: { + name: "Composite", + className: "SqlMISink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + sqlWriterStoredProcedureName: { + serializedName: "sqlWriterStoredProcedureName", + type: { + name: "any" + } + }, + sqlWriterTableType: { + serializedName: "sqlWriterTableType", + type: { + name: "any" + } + }, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + storedProcedureTableTypeParameterName: { + serializedName: "storedProcedureTableTypeParameterName", + type: { + name: "any" + } + }, + tableOption: { + serializedName: "tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const SqlDWSink: coreHttp.CompositeMapper = { + serializedName: "SqlDWSink", + type: { + name: "Composite", + className: "SqlDWSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + }, + allowPolyBase: { + serializedName: "allowPolyBase", + type: { + name: "any" + } + }, + polyBaseSettings: { + serializedName: "polyBaseSettings", + type: { + name: "Composite", + className: "PolybaseSettings" + } + }, + allowCopyCommand: { + serializedName: "allowCopyCommand", + type: { + name: "any" + } + }, + copyCommandSettings: { + serializedName: "copyCommandSettings", + type: { + name: "Composite", + className: "DWCopyCommandSettings" + } + }, + tableOption: { + serializedName: "tableOption", + type: { + name: "any" + } + } + } + } +}; + +export const OracleSink: coreHttp.CompositeMapper = { + serializedName: "OracleSink", + type: { + name: "Composite", + className: "OracleSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataLakeStoreSink: coreHttp.CompositeMapper = { + serializedName: "AzureDataLakeStoreSink", + type: { + name: "Composite", + className: "AzureDataLakeStoreSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + }, + enableAdlsSingleFileParallel: { + serializedName: "enableAdlsSingleFileParallel", + type: { + name: "any" + } + } + } + } +}; + +export const AzureBlobFSSink: coreHttp.CompositeMapper = { + serializedName: "AzureBlobFSSink", + type: { + name: "Composite", + className: "AzureBlobFSSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + copyBehavior: { + serializedName: "copyBehavior", + type: { + name: "any" + } + } + 
} + } +}; + +export const AzureSearchIndexSink: coreHttp.CompositeMapper = { + serializedName: "AzureSearchIndexSink", + type: { + name: "Composite", + className: "AzureSearchIndexSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + } + } + } +}; + +export const OdbcSink: coreHttp.CompositeMapper = { + serializedName: "OdbcSink", + type: { + name: "Composite", + className: "OdbcSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const InformixSink: coreHttp.CompositeMapper = { + serializedName: "InformixSink", + type: { + name: "Composite", + className: "InformixSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const MicrosoftAccessSink: coreHttp.CompositeMapper = { + serializedName: "MicrosoftAccessSink", + type: { + name: "Composite", + className: "MicrosoftAccessSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + preCopyScript: { + serializedName: "preCopyScript", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsSink: coreHttp.CompositeMapper = { + serializedName: "DynamicsSink", + type: { + name: "Composite", + className: "DynamicsSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + required: true, + type: { + name: "String" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + }, + alternateKeyName: { + serializedName: "alternateKeyName", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsCrmSink: coreHttp.CompositeMapper = { + serializedName: "DynamicsCrmSink", + type: { + name: "Composite", + className: "DynamicsCrmSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + required: true, + type: { + name: "String" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + }, + alternateKeyName: { + serializedName: "alternateKeyName", + type: { + name: "any" + } + } + } + } +}; + +export const CommonDataServiceForAppsSink: coreHttp.CompositeMapper = { + serializedName: "CommonDataServiceForAppsSink", + type: { + name: "Composite", + className: "CommonDataServiceForAppsSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, 
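+    // DynamicsSink, DynamicsCrmSink and CommonDataServiceForAppsSink share one
+    // contract: writeBehavior is the only required property, while
+    // ignoreNullValues and alternateKeyName remain optional.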
+ modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + required: true, + type: { + name: "String" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + }, + alternateKeyName: { + serializedName: "alternateKeyName", + type: { + name: "any" + } + } + } + } +}; + +export const AzureDataExplorerSink: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorerSink", + type: { + name: "Composite", + className: "AzureDataExplorerSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + ingestionMappingName: { + serializedName: "ingestionMappingName", + type: { + name: "any" + } + }, + ingestionMappingAsJson: { + serializedName: "ingestionMappingAsJson", + type: { + name: "any" + } + }, + flushImmediately: { + serializedName: "flushImmediately", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceSink: coreHttp.CompositeMapper = { + serializedName: "SalesforceSink", + type: { + name: "Composite", + className: "SalesforceSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + }, + externalIdFieldName: { + serializedName: "externalIdFieldName", + type: { + name: "any" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceServiceCloudSink: coreHttp.CompositeMapper = { + serializedName: "SalesforceServiceCloudSink", + type: { + name: "Composite", + className: "SalesforceServiceCloudSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "String" + } + }, + externalIdFieldName: { + serializedName: "externalIdFieldName", + type: { + name: "any" + } + }, + ignoreNullValues: { + serializedName: "ignoreNullValues", + type: { + name: "any" + } + } + } + } +}; + +export const CosmosDbMongoDbApiSink: coreHttp.CompositeMapper = { + serializedName: "CosmosDbMongoDbApiSink", + type: { + name: "Composite", + className: "CosmosDbMongoDbApiSink", + uberParent: "CopySink", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator, + modelProperties: { + ...CopySink.type.modelProperties, + writeBehavior: { + serializedName: "writeBehavior", + type: { + name: "any" + } + } + } + } +}; + +export const TabularTranslator: coreHttp.CompositeMapper = { + serializedName: "TabularTranslator", + type: { + name: "Composite", + className: "TabularTranslator", + uberParent: "CopyTranslator", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopyTranslator.type.polymorphicDiscriminator, + modelProperties: { + ...CopyTranslator.type.modelProperties, + columnMappings: { + serializedName: "columnMappings", + type: { + name: "any" + } + }, + schemaMapping: { + serializedName: "schemaMapping", + type: { + name: "any" + } + }, + collectionReference: { + serializedName: "collectionReference", + type: { + name: "any" + } 
+ }, + mapComplexValuesToString: { + serializedName: "mapComplexValuesToString", + type: { + name: "any" + } + }, + mappings: { + serializedName: "mappings", + type: { + name: "any" + } + } + } + } +}; + +export const TriggerDependencyReference: coreHttp.CompositeMapper = { + serializedName: "TriggerDependencyReference", + type: { + name: "Composite", + className: "TriggerDependencyReference", + uberParent: "DependencyReference", + polymorphicDiscriminator: { + serializedName: "type", + clientName: "type" + }, + modelProperties: { + ...DependencyReference.type.modelProperties, + referenceTrigger: { + serializedName: "referenceTrigger", + type: { + name: "Composite", + className: "TriggerReference" + } + } + } + } +}; + +export const SelfDependencyTumblingWindowTriggerReference: coreHttp.CompositeMapper = { + serializedName: "SelfDependencyTumblingWindowTriggerReference", + type: { + name: "Composite", + className: "SelfDependencyTumblingWindowTriggerReference", + uberParent: "DependencyReference", + polymorphicDiscriminator: DependencyReference.type.polymorphicDiscriminator, + modelProperties: { + ...DependencyReference.type.modelProperties, + offset: { + constraints: { + Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + MaxLength: 15, + MinLength: 8 + }, + serializedName: "offset", + required: true, + type: { + name: "String" + } + }, + size: { + constraints: { + Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + MaxLength: 15, + MinLength: 8 + }, + serializedName: "size", + type: { + name: "String" + } + } + } + } +}; + +export const LinkedIntegrationRuntimeKeyAuthorization: coreHttp.CompositeMapper = { + serializedName: "Key", + type: { + name: "Composite", + className: "LinkedIntegrationRuntimeKeyAuthorization", + uberParent: "LinkedIntegrationRuntimeType", + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedIntegrationRuntimeType.type.modelProperties, + key: { + serializedName: "key", + type: { + name: "Composite", + className: "SecureString" + } + } + } + } +}; + +export const LinkedIntegrationRuntimeRbacAuthorization: coreHttp.CompositeMapper = { + serializedName: "RBAC", + type: { + name: "Composite", + className: "LinkedIntegrationRuntimeRbacAuthorization", + uberParent: "LinkedIntegrationRuntimeType", + polymorphicDiscriminator: LinkedIntegrationRuntimeType.type.polymorphicDiscriminator, + modelProperties: { + ...LinkedIntegrationRuntimeType.type.modelProperties, + resourceId: { + serializedName: "resourceId", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const LinkedServiceResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "LinkedServiceResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "LinkedService" + } + } + } + } +}; + +export const DatasetResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DatasetResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "Dataset" + } + } + } + } +}; + +export const PipelineResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "PipelineResource", + additionalProperties: { type: { name: "Object" } }, + modelProperties: { + ...AzureEntityResource.type.modelProperties, +
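+    // A serializedName prefixed with "properties." flattens the REST payload's
+    // nested "properties" bag onto the client model, so callers read
+    // pipeline.description instead of pipeline.properties.description.
+    // Wire shape this mapper expects (sketch):
+    //
+    //   { "id": "...", "name": "myPipeline", "etag": "...",
+    //     "properties": { "description": "...", "activities": [ /* ... */ ] } }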
description: { + serializedName: "properties.description", + type: { + name: "String" + } + }, + activities: { + serializedName: "properties.activities", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Activity" + } + } + } + }, + parameters: { + serializedName: "properties.parameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "ParameterSpecification" } + } + } + }, + variables: { + serializedName: "properties.variables", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "VariableSpecification" } + } + } + }, + concurrency: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "properties.concurrency", + type: { + name: "Number" + } + }, + annotations: { + serializedName: "properties.annotations", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + runDimensions: { + serializedName: "properties.runDimensions", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + folder: { + serializedName: "properties.folder", + type: { + name: "Composite", + className: "PipelineFolder" + } + } + } + } +}; + +export const TriggerResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "TriggerResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "Trigger" + } + } + } + } +}; + +export const DataFlowResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "DataFlow" + } + } + } + } +}; + +export const SparkJobDefinitionResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobDefinitionResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "SparkJobDefinition" + } + } + } + } +}; + +export const IntegrationRuntimeResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "IntegrationRuntimeResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "IntegrationRuntime" + } + } + } + } +}; + +export const SubResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SubResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties + } + } +}; + +export const RerunTriggerResource: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "RerunTriggerResource", + modelProperties: { + ...AzureEntityResource.type.modelProperties, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "RerunTumblingWindowTrigger" + } + } + } + } +}; + +export const Workspace: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "Workspace", + modelProperties: { + ...TrackedResource.type.modelProperties, + identity: { + serializedName: "identity", + type: { + name: "Composite", + className: "ManagedIdentity" + } + }, + defaultDataLakeStorage: { + serializedName: "properties.defaultDataLakeStorage", + type: { + name: "Composite", + className: "DataLakeStorageAccountDetails" + } + }, + sqlAdministratorLoginPassword: { + 
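+    // readOnly: true marks server-populated fields (provisioningState,
+    // workspaceUID, extraProperties): core-http skips them when serializing
+    // outbound requests and only maps them on deserialized responses.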
serializedName: "properties.sqlAdministratorLoginPassword", + type: { + name: "String" + } + }, + managedResourceGroupName: { + serializedName: "properties.managedResourceGroupName", + type: { + name: "String" + } + }, + provisioningState: { + serializedName: "properties.provisioningState", + readOnly: true, + type: { + name: "String" + } + }, + sqlAdministratorLogin: { + serializedName: "properties.sqlAdministratorLogin", + type: { + name: "String" + } + }, + virtualNetworkProfile: { + serializedName: "properties.virtualNetworkProfile", + type: { + name: "Composite", + className: "VirtualNetworkProfile" + } + }, + connectivityEndpoints: { + serializedName: "properties.connectivityEndpoints", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + managedVirtualNetwork: { + serializedName: "properties.managedVirtualNetwork", + type: { + name: "String" + } + }, + privateEndpointConnections: { + serializedName: "properties.privateEndpointConnections", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PrivateEndpointConnection" + } + } + } + }, + encryption: { + serializedName: "properties.encryption", + type: { + name: "Composite", + className: "EncryptionDetails" + } + }, + workspaceUID: { + serializedName: "properties.workspaceUID", + readOnly: true, + type: { + name: "Uuid" + } + }, + extraProperties: { + serializedName: "properties.extraProperties", + readOnly: true, + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + managedVirtualNetworkSettings: { + serializedName: "properties.managedVirtualNetworkSettings", + type: { + name: "Composite", + className: "ManagedVirtualNetworkSettings" + } + }, + workspaceRepositoryConfiguration: { + serializedName: "properties.workspaceRepositoryConfiguration", + type: { + name: "Composite", + className: "WorkspaceRepositoryConfiguration" + } + }, + purviewConfiguration: { + serializedName: "properties.purviewConfiguration", + type: { + name: "Composite", + className: "PurviewConfiguration" + } + } + } + } +}; + +export const SqlPool: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlPool", + modelProperties: { + ...TrackedResource.type.modelProperties, + sku: { + serializedName: "sku", + type: { + name: "Composite", + className: "Sku" + } + }, + maxSizeBytes: { + serializedName: "properties.maxSizeBytes", + type: { + name: "Number" + } + }, + collation: { + serializedName: "properties.collation", + type: { + name: "String" + } + }, + sourceDatabaseId: { + serializedName: "properties.sourceDatabaseId", + type: { + name: "String" + } + }, + recoverableDatabaseId: { + serializedName: "properties.recoverableDatabaseId", + type: { + name: "String" + } + }, + provisioningState: { + serializedName: "properties.provisioningState", + type: { + name: "String" + } + }, + status: { + serializedName: "properties.status", + type: { + name: "String" + } + }, + restorePointInTime: { + serializedName: "properties.restorePointInTime", + type: { + name: "String" + } + }, + createMode: { + serializedName: "properties.createMode", + type: { + name: "String" + } + }, + creationDate: { + serializedName: "properties.creationDate", + type: { + name: "DateTime" + } + } + } + } +}; + +export const BigDataPoolResourceInfo: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "BigDataPoolResourceInfo", + modelProperties: { + ...TrackedResource.type.modelProperties, + provisioningState: { + serializedName: "properties.provisioningState", + type: { 
+ name: "String" + } + }, + autoScale: { + serializedName: "properties.autoScale", + type: { + name: "Composite", + className: "AutoScaleProperties" + } + }, + creationDate: { + serializedName: "properties.creationDate", + type: { + name: "DateTime" + } + }, + autoPause: { + serializedName: "properties.autoPause", + type: { + name: "Composite", + className: "AutoPauseProperties" + } + }, + isComputeIsolationEnabled: { + serializedName: "properties.isComputeIsolationEnabled", + type: { + name: "Boolean" + } + }, + haveLibraryRequirementsChanged: { + serializedName: "properties.haveLibraryRequirementsChanged", + type: { + name: "Boolean" + } + }, + sessionLevelPackagesEnabled: { + serializedName: "properties.sessionLevelPackagesEnabled", + type: { + name: "Boolean" + } + }, + sparkEventsFolder: { + serializedName: "properties.sparkEventsFolder", + type: { + name: "String" + } + }, + nodeCount: { + serializedName: "properties.nodeCount", + type: { + name: "Number" + } + }, + libraryRequirements: { + serializedName: "properties.libraryRequirements", + type: { + name: "Composite", + className: "LibraryRequirements" + } + }, + sparkConfigProperties: { + serializedName: "properties.sparkConfigProperties", + type: { + name: "Composite", + className: "LibraryRequirements" + } + }, + sparkVersion: { + serializedName: "properties.sparkVersion", + type: { + name: "String" + } + }, + defaultSparkLogFolder: { + serializedName: "properties.defaultSparkLogFolder", + type: { + name: "String" + } + }, + nodeSize: { + serializedName: "properties.nodeSize", + type: { + name: "String" + } + }, + nodeSizeFamily: { + serializedName: "properties.nodeSizeFamily", + type: { + name: "String" + } + } + } + } +}; + +export const CopyActivity: coreHttp.CompositeMapper = { + serializedName: "Copy", + type: { + name: "Composite", + className: "CopyActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + inputs: { + serializedName: "inputs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + }, + outputs: { + serializedName: "outputs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + }, + source: { + serializedName: "typeProperties.source", + type: { + name: "Composite", + className: "CopySource" + } + }, + sink: { + serializedName: "typeProperties.sink", + type: { + name: "Composite", + className: "CopySink" + } + }, + translator: { + serializedName: "typeProperties.translator", + type: { + name: "any" + } + }, + enableStaging: { + serializedName: "typeProperties.enableStaging", + type: { + name: "any" + } + }, + stagingSettings: { + serializedName: "typeProperties.stagingSettings", + type: { + name: "Composite", + className: "StagingSettings" + } + }, + parallelCopies: { + serializedName: "typeProperties.parallelCopies", + type: { + name: "any" + } + }, + dataIntegrationUnits: { + serializedName: "typeProperties.dataIntegrationUnits", + type: { + name: "any" + } + }, + enableSkipIncompatibleRow: { + serializedName: "typeProperties.enableSkipIncompatibleRow", + type: { + name: "any" + } + }, + redirectIncompatibleRowSettings: { + serializedName: "typeProperties.redirectIncompatibleRowSettings", + type: { + name: "Composite", + className: "RedirectIncompatibleRowSettings" + } + }, + preserveRules: { + serializedName: 
"typeProperties.preserveRules", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + preserve: { + serializedName: "typeProperties.preserve", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + } + } + } +}; + +export const HDInsightHiveActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightHive", + type: { + name: "Composite", + className: "HDInsightHiveActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storageLinkedServices: { + serializedName: "typeProperties.storageLinkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + scriptPath: { + serializedName: "typeProperties.scriptPath", + type: { + name: "any" + } + }, + scriptLinkedService: { + serializedName: "typeProperties.scriptLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + defines: { + serializedName: "typeProperties.defines", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + variables: { + serializedName: "typeProperties.variables", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + queryTimeout: { + serializedName: "typeProperties.queryTimeout", + type: { + name: "Number" + } + } + } + } +}; + +export const HDInsightPigActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightPig", + type: { + name: "Composite", + className: "HDInsightPigActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storageLinkedServices: { + serializedName: "typeProperties.storageLinkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "any" + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + scriptPath: { + serializedName: "typeProperties.scriptPath", + type: { + name: "any" + } + }, + scriptLinkedService: { + serializedName: "typeProperties.scriptLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + defines: { + serializedName: "typeProperties.defines", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const HDInsightMapReduceActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightMapReduce", + type: { + name: "Composite", + className: "HDInsightMapReduceActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storageLinkedServices: { + serializedName: "typeProperties.storageLinkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: 
"LinkedServiceReference" + } + } + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + className: { + serializedName: "typeProperties.className", + required: true, + type: { + name: "any" + } + }, + jarFilePath: { + serializedName: "typeProperties.jarFilePath", + required: true, + type: { + name: "any" + } + }, + jarLinkedService: { + serializedName: "typeProperties.jarLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + jarLibs: { + serializedName: "typeProperties.jarLibs", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + defines: { + serializedName: "typeProperties.defines", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const HDInsightStreamingActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightStreaming", + type: { + name: "Composite", + className: "HDInsightStreamingActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storageLinkedServices: { + serializedName: "typeProperties.storageLinkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + mapper: { + serializedName: "typeProperties.mapper", + required: true, + type: { + name: "any" + } + }, + reducer: { + serializedName: "typeProperties.reducer", + required: true, + type: { + name: "any" + } + }, + input: { + serializedName: "typeProperties.input", + required: true, + type: { + name: "any" + } + }, + output: { + serializedName: "typeProperties.output", + required: true, + type: { + name: "any" + } + }, + filePaths: { + serializedName: "typeProperties.filePaths", + required: true, + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + fileLinkedService: { + serializedName: "typeProperties.fileLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + combiner: { + serializedName: "typeProperties.combiner", + type: { + name: "any" + } + }, + commandEnvironment: { + serializedName: "typeProperties.commandEnvironment", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + defines: { + serializedName: "typeProperties.defines", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const HDInsightSparkActivity: coreHttp.CompositeMapper = { + serializedName: "HDInsightSpark", + type: { + name: "Composite", + className: "HDInsightSparkActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + rootPath: { + serializedName: "typeProperties.rootPath", + required: true, + type: { + name: "any" + } + }, + entryFilePath: { + serializedName: "typeProperties.entryFilePath", + required: true, + type: 
{ + name: "any" + } + }, + arguments: { + serializedName: "typeProperties.arguments", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + getDebugInfo: { + serializedName: "typeProperties.getDebugInfo", + type: { + name: "String" + } + }, + sparkJobLinkedService: { + serializedName: "typeProperties.sparkJobLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + className: { + serializedName: "typeProperties.className", + type: { + name: "String" + } + }, + proxyUser: { + serializedName: "typeProperties.proxyUser", + type: { + name: "any" + } + }, + sparkConfig: { + serializedName: "typeProperties.sparkConfig", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const ExecuteSsisPackageActivity: coreHttp.CompositeMapper = { + serializedName: "ExecuteSSISPackage", + type: { + name: "Composite", + className: "ExecuteSsisPackageActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + packageLocation: { + serializedName: "typeProperties.packageLocation", + type: { + name: "Composite", + className: "SsisPackageLocation" + } + }, + runtime: { + serializedName: "typeProperties.runtime", + type: { + name: "any" + } + }, + loggingLevel: { + serializedName: "typeProperties.loggingLevel", + type: { + name: "any" + } + }, + environmentPath: { + serializedName: "typeProperties.environmentPath", + type: { + name: "any" + } + }, + executionCredential: { + serializedName: "typeProperties.executionCredential", + type: { + name: "Composite", + className: "SsisExecutionCredential" + } + }, + connectVia: { + serializedName: "typeProperties.connectVia", + type: { + name: "Composite", + className: "IntegrationRuntimeReference" + } + }, + projectParameters: { + serializedName: "typeProperties.projectParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "SsisExecutionParameter" } + } + } + }, + packageParameters: { + serializedName: "typeProperties.packageParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "SsisExecutionParameter" } + } + } + }, + projectConnectionManagers: { + serializedName: "typeProperties.projectConnectionManagers", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + packageConnectionManagers: { + serializedName: "typeProperties.packageConnectionManagers", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + propertyOverrides: { + serializedName: "typeProperties.propertyOverrides", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "SsisPropertyOverride" } + } + } + }, + logLocation: { + serializedName: "typeProperties.logLocation", + type: { + name: "Composite", + className: "SsisLogLocation" + } + } + } + } +}; + +export const CustomActivity: coreHttp.CompositeMapper = { + serializedName: "Custom", + type: { + name: "Composite", + className: "CustomActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + command: { + serializedName: "typeProperties.command", + required: true, + type: { + name: "any" + } + }, + resourceLinkedService: { + serializedName: 
"typeProperties.resourceLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + folderPath: { + serializedName: "typeProperties.folderPath", + type: { + name: "any" + } + }, + referenceObjects: { + serializedName: "typeProperties.referenceObjects", + type: { + name: "Composite", + className: "CustomActivityReferenceObject" + } + }, + extendedProperties: { + serializedName: "typeProperties.extendedProperties", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + retentionTimeInDays: { + serializedName: "typeProperties.retentionTimeInDays", + type: { + name: "any" + } + } + } + } +}; + +export const SqlServerStoredProcedureActivity: coreHttp.CompositeMapper = { + serializedName: "SqlServerStoredProcedure", + type: { + name: "Composite", + className: "SqlServerStoredProcedureActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + storedProcedureName: { + serializedName: "typeProperties.storedProcedureName", + required: true, + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "typeProperties.storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + } + } + } +}; + +export const DeleteActivity: coreHttp.CompositeMapper = { + serializedName: "Delete", + type: { + name: "Composite", + className: "DeleteActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + recursive: { + serializedName: "typeProperties.recursive", + type: { + name: "any" + } + }, + maxConcurrentConnections: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "typeProperties.maxConcurrentConnections", + type: { + name: "Number" + } + }, + enableLogging: { + serializedName: "typeProperties.enableLogging", + type: { + name: "any" + } + }, + logStorageSettings: { + serializedName: "typeProperties.logStorageSettings", + type: { + name: "Composite", + className: "LogStorageSettings" + } + }, + dataset: { + serializedName: "typeProperties.dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + } +}; + +export const AzureDataExplorerCommandActivity: coreHttp.CompositeMapper = { + serializedName: "AzureDataExplorerCommand", + type: { + name: "Composite", + className: "AzureDataExplorerCommandActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + command: { + serializedName: "typeProperties.command", + required: true, + type: { + name: "any" + } + }, + commandTimeout: { + serializedName: "typeProperties.commandTimeout", + type: { + name: "any" + } + } + } + } +}; + +export const LookupActivity: coreHttp.CompositeMapper = { + serializedName: "Lookup", + type: { + name: "Composite", + className: "LookupActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + source: { + serializedName: "typeProperties.source", + type: { + name: "Composite", + 
className: "CopySource" + } + }, + dataset: { + serializedName: "typeProperties.dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + }, + firstRowOnly: { + serializedName: "typeProperties.firstRowOnly", + type: { + name: "any" + } + } + } + } +}; + +export const WebActivity: coreHttp.CompositeMapper = { + serializedName: "WebActivity", + type: { + name: "Composite", + className: "WebActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + method: { + serializedName: "typeProperties.method", + required: true, + type: { + name: "String" + } + }, + url: { + serializedName: "typeProperties.url", + required: true, + type: { + name: "any" + } + }, + headers: { + serializedName: "typeProperties.headers", + type: { + name: "any" + } + }, + body: { + serializedName: "typeProperties.body", + type: { + name: "any" + } + }, + authentication: { + serializedName: "typeProperties.authentication", + type: { + name: "Composite", + className: "WebActivityAuthentication" + } + }, + datasets: { + serializedName: "typeProperties.datasets", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DatasetReference" + } + } + } + }, + linkedServices: { + serializedName: "typeProperties.linkedServices", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + }, + connectVia: { + serializedName: "typeProperties.connectVia", + type: { + name: "Composite", + className: "IntegrationRuntimeReference" + } + } + } + } +}; + +export const GetMetadataActivity: coreHttp.CompositeMapper = { + serializedName: "GetMetadata", + type: { + name: "Composite", + className: "GetMetadataActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + dataset: { + serializedName: "typeProperties.dataset", + type: { + name: "Composite", + className: "DatasetReference" + } + }, + fieldList: { + serializedName: "typeProperties.fieldList", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + } + } + } +}; + +export const AzureMLBatchExecutionActivity: coreHttp.CompositeMapper = { + serializedName: "AzureMLBatchExecution", + type: { + name: "Composite", + className: "AzureMLBatchExecutionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + globalParameters: { + serializedName: "typeProperties.globalParameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + webServiceOutputs: { + serializedName: "typeProperties.webServiceOutputs", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "AzureMLWebServiceFile" } + } + } + }, + webServiceInputs: { + serializedName: "typeProperties.webServiceInputs", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "AzureMLWebServiceFile" } + } + } + } + } + } +}; + +export const AzureMLUpdateResourceActivity: coreHttp.CompositeMapper = { + serializedName: "AzureMLUpdateResource", + type: { + name: "Composite", + className: "AzureMLUpdateResourceActivity", + uberParent: 
"Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + trainedModelName: { + serializedName: "typeProperties.trainedModelName", + required: true, + type: { + name: "any" + } + }, + trainedModelLinkedServiceName: { + serializedName: "typeProperties.trainedModelLinkedServiceName", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + trainedModelFilePath: { + serializedName: "typeProperties.trainedModelFilePath", + required: true, + type: { + name: "any" + } + } + } + } +}; + +export const AzureMLExecutePipelineActivity: coreHttp.CompositeMapper = { + serializedName: "AzureMLExecutePipeline", + type: { + name: "Composite", + className: "AzureMLExecutePipelineActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + mlPipelineId: { + serializedName: "typeProperties.mlPipelineId", + required: true, + type: { + name: "any" + } + }, + experimentName: { + serializedName: "typeProperties.experimentName", + type: { + name: "any" + } + }, + mlPipelineParameters: { + serializedName: "typeProperties.mlPipelineParameters", + type: { + name: "any" + } + }, + mlParentRunId: { + serializedName: "typeProperties.mlParentRunId", + type: { + name: "any" + } + }, + continueOnStepFailure: { + serializedName: "typeProperties.continueOnStepFailure", + type: { + name: "any" + } + } + } + } +}; + +export const DataLakeAnalyticsUsqlActivity: coreHttp.CompositeMapper = { + serializedName: "DataLakeAnalyticsU-SQL", + type: { + name: "Composite", + className: "DataLakeAnalyticsUsqlActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + scriptPath: { + serializedName: "typeProperties.scriptPath", + required: true, + type: { + name: "any" + } + }, + scriptLinkedService: { + serializedName: "typeProperties.scriptLinkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + }, + degreeOfParallelism: { + serializedName: "typeProperties.degreeOfParallelism", + type: { + name: "any" + } + }, + priority: { + serializedName: "typeProperties.priority", + type: { + name: "any" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + runtimeVersion: { + serializedName: "typeProperties.runtimeVersion", + type: { + name: "any" + } + }, + compilationMode: { + serializedName: "typeProperties.compilationMode", + type: { + name: "any" + } + } + } + } +}; + +export const DatabricksNotebookActivity: coreHttp.CompositeMapper = { + serializedName: "DatabricksNotebook", + type: { + name: "Composite", + className: "DatabricksNotebookActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + notebookPath: { + serializedName: "typeProperties.notebookPath", + required: true, + type: { + name: "any" + } + }, + baseParameters: { + serializedName: "typeProperties.baseParameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + 
libraries: { + serializedName: "typeProperties.libraries", + type: { + name: "Sequence", + element: { + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } + } + } +}; + +export const DatabricksSparkJarActivity: coreHttp.CompositeMapper = { + serializedName: "DatabricksSparkJar", + type: { + name: "Composite", + className: "DatabricksSparkJarActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + mainClassName: { + serializedName: "typeProperties.mainClassName", + required: true, + type: { + name: "any" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + libraries: { + serializedName: "typeProperties.libraries", + type: { + name: "Sequence", + element: { + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } + } + } +}; + +export const DatabricksSparkPythonActivity: coreHttp.CompositeMapper = { + serializedName: "DatabricksSparkPython", + type: { + name: "Composite", + className: "DatabricksSparkPythonActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + pythonFile: { + serializedName: "typeProperties.pythonFile", + required: true, + type: { + name: "any" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Sequence", + element: { + type: { + name: "any" + } + } + } + }, + libraries: { + serializedName: "typeProperties.libraries", + type: { + name: "Sequence", + element: { + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } + } + } +}; + +export const AzureFunctionActivity: coreHttp.CompositeMapper = { + serializedName: "AzureFunctionActivity", + type: { + name: "Composite", + className: "AzureFunctionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + method: { + serializedName: "typeProperties.method", + required: true, + type: { + name: "String" + } + }, + functionName: { + serializedName: "typeProperties.functionName", + required: true, + type: { + name: "any" + } + }, + headers: { + serializedName: "typeProperties.headers", + type: { + name: "any" + } + }, + body: { + serializedName: "typeProperties.body", + type: { + name: "any" + } + } + } + } +}; + +export const ExecuteDataFlowActivity: coreHttp.CompositeMapper = { + serializedName: "ExecuteDataFlow", + type: { + name: "Composite", + className: "ExecuteDataFlowActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + dataFlow: { + serializedName: "typeProperties.dataFlow", + type: { + name: "Composite", + className: "DataFlowReference" + } + }, + staging: { + serializedName: "typeProperties.staging", + type: { + name: "Composite", + className: "DataFlowStagingInfo" + } + }, + integrationRuntime: { + serializedName: "typeProperties.integrationRuntime", + type: { + name: "Composite", + className: "IntegrationRuntimeReference" + } + 
}, + compute: { + serializedName: "typeProperties.compute", + type: { + name: "Composite", + className: "ExecuteDataFlowActivityTypePropertiesCompute" + } + } + } + } +}; + +export const SynapseNotebookActivity: coreHttp.CompositeMapper = { + serializedName: "SynapseNotebook", + type: { + name: "Composite", + className: "SynapseNotebookActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + notebook: { + serializedName: "typeProperties.notebook", + type: { + name: "Composite", + className: "SynapseNotebookReference" + } + }, + parameters: { + serializedName: "typeProperties.parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } + } + } +}; + +export const SynapseSparkJobDefinitionActivity: coreHttp.CompositeMapper = { + serializedName: "SparkJob", + type: { + name: "Composite", + className: "SynapseSparkJobDefinitionActivity", + uberParent: "Activity", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Activity.type.polymorphicDiscriminator, + modelProperties: { + ...ExecutionActivity.type.modelProperties, + sparkJob: { + serializedName: "typeProperties.sparkJob", + type: { + name: "Composite", + className: "SynapseSparkJobReference" + } + } + } + } +}; + +export const ScheduleTrigger: coreHttp.CompositeMapper = { + serializedName: "ScheduleTrigger", + type: { + name: "Composite", + className: "ScheduleTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...MultiplePipelineTrigger.type.modelProperties, + recurrence: { + serializedName: "typeProperties.recurrence", + type: { + name: "Composite", + className: "ScheduleTriggerRecurrence" + } + } + } + } +}; + +export const BlobTrigger: coreHttp.CompositeMapper = { + serializedName: "BlobTrigger", + type: { + name: "Composite", + className: "BlobTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...MultiplePipelineTrigger.type.modelProperties, + folderPath: { + serializedName: "typeProperties.folderPath", + required: true, + type: { + name: "String" + } + }, + maxConcurrency: { + serializedName: "typeProperties.maxConcurrency", + required: true, + type: { + name: "Number" + } + }, + linkedService: { + serializedName: "typeProperties.linkedService", + type: { + name: "Composite", + className: "LinkedServiceReference" + } + } + } + } +}; + +export const BlobEventsTrigger: coreHttp.CompositeMapper = { + serializedName: "BlobEventsTrigger", + type: { + name: "Composite", + className: "BlobEventsTrigger", + uberParent: "Trigger", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: Trigger.type.polymorphicDiscriminator, + modelProperties: { + ...MultiplePipelineTrigger.type.modelProperties, + blobPathBeginsWith: { + serializedName: "typeProperties.blobPathBeginsWith", + type: { + name: "String" + } + }, + blobPathEndsWith: { + serializedName: "typeProperties.blobPathEndsWith", + type: { + name: "String" + } + }, + ignoreEmptyBlobs: { + serializedName: "typeProperties.ignoreEmptyBlobs", + type: { + name: "Boolean" + } + }, + events: { + serializedName: "typeProperties.events", + required: true, + type: { + name: "Sequence", + element: { + 
type: { + name: "String" + } + } + } + }, + scope: { + serializedName: "typeProperties.scope", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const AzureTableSource: coreHttp.CompositeMapper = { + serializedName: "AzureTableSource", + type: { + name: "Composite", + className: "AzureTableSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + azureTableSourceQuery: { + serializedName: "azureTableSourceQuery", + type: { + name: "any" + } + }, + azureTableSourceIgnoreTableNotFound: { + serializedName: "azureTableSourceIgnoreTableNotFound", + type: { + name: "any" + } + } + } + } +}; + +export const InformixSource: coreHttp.CompositeMapper = { + serializedName: "InformixSource", + type: { + name: "Composite", + className: "InformixSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const Db2Source: coreHttp.CompositeMapper = { + serializedName: "Db2Source", + type: { + name: "Composite", + className: "Db2Source", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const OdbcSource: coreHttp.CompositeMapper = { + serializedName: "OdbcSource", + type: { + name: "Composite", + className: "OdbcSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MySqlSource: coreHttp.CompositeMapper = { + serializedName: "MySqlSource", + type: { + name: "Composite", + className: "MySqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const PostgreSqlSource: coreHttp.CompositeMapper = { + serializedName: "PostgreSqlSource", + type: { + name: "Composite", + className: "PostgreSqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SybaseSource: coreHttp.CompositeMapper = { + serializedName: "SybaseSource", + type: { + name: "Composite", + className: "SybaseSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SapBwSource: coreHttp.CompositeMapper = { + serializedName: 
"SapBwSource", + type: { + name: "Composite", + className: "SapBwSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceSource: coreHttp.CompositeMapper = { + serializedName: "SalesforceSource", + type: { + name: "Composite", + className: "SalesforceSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + readBehavior: { + serializedName: "readBehavior", + type: { + name: "String" + } + } + } + } +}; + +export const SapCloudForCustomerSource: coreHttp.CompositeMapper = { + serializedName: "SapCloudForCustomerSource", + type: { + name: "Composite", + className: "SapCloudForCustomerSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SapEccSource: coreHttp.CompositeMapper = { + serializedName: "SapEccSource", + type: { + name: "Composite", + className: "SapEccSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SapHanaSource: coreHttp.CompositeMapper = { + serializedName: "SapHanaSource", + type: { + name: "Composite", + className: "SapHanaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + packetSize: { + serializedName: "packetSize", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "SapHanaPartitionSettings" + } + } + } + } +}; + +export const SapOpenHubSource: coreHttp.CompositeMapper = { + serializedName: "SapOpenHubSource", + type: { + name: "Composite", + className: "SapOpenHubSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + excludeLastRequest: { + serializedName: "excludeLastRequest", + type: { + name: "any" + } + }, + baseRequestId: { + serializedName: "baseRequestId", + type: { + name: "any" + } + } + } + } +}; + +export const SapTableSource: coreHttp.CompositeMapper = { + serializedName: "SapTableSource", + type: { + name: "Composite", + className: "SapTableSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + 
...TabularSource.type.modelProperties, + rowCount: { + serializedName: "rowCount", + type: { + name: "any" + } + }, + rowSkips: { + serializedName: "rowSkips", + type: { + name: "any" + } + }, + rfcTableFields: { + serializedName: "rfcTableFields", + type: { + name: "any" + } + }, + rfcTableOptions: { + serializedName: "rfcTableOptions", + type: { + name: "any" + } + }, + batchSize: { + serializedName: "batchSize", + type: { + name: "any" + } + }, + customRfcReadTableFunctionModule: { + serializedName: "customRfcReadTableFunctionModule", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "SapTablePartitionSettings" + } + } + } + } +}; + +export const SqlSource: coreHttp.CompositeMapper = { + serializedName: "SqlSource", + type: { + name: "Composite", + className: "SqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + } + } + } +}; + +export const SqlServerSource: coreHttp.CompositeMapper = { + serializedName: "SqlServerSource", + type: { + name: "Composite", + className: "SqlServerSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + produceAdditionalTypes: { + serializedName: "produceAdditionalTypes", + type: { + name: "any" + } + } + } + } +}; + +export const AzureSqlSource: coreHttp.CompositeMapper = { + serializedName: "AzureSqlSource", + type: { + name: "Composite", + className: "AzureSqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + produceAdditionalTypes: { + serializedName: "produceAdditionalTypes", + type: { + name: "any" + } + } + } + } +}; + +export const SqlMISource: coreHttp.CompositeMapper = { + serializedName: "SqlMISource", + type: { + name: "Composite", + className: "SqlMISource", + 
uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "Dictionary", + value: { + type: { name: "Composite", className: "StoredProcedureParameter" } + } + } + }, + produceAdditionalTypes: { + serializedName: "produceAdditionalTypes", + type: { + name: "any" + } + } + } + } +}; + +export const SqlDWSource: coreHttp.CompositeMapper = { + serializedName: "SqlDWSource", + type: { + name: "Composite", + className: "SqlDWSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + sqlReaderQuery: { + serializedName: "sqlReaderQuery", + type: { + name: "any" + } + }, + sqlReaderStoredProcedureName: { + serializedName: "sqlReaderStoredProcedureName", + type: { + name: "any" + } + }, + storedProcedureParameters: { + serializedName: "storedProcedureParameters", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMySqlSource: coreHttp.CompositeMapper = { + serializedName: "AzureMySqlSource", + type: { + name: "Composite", + className: "AzureMySqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const TeradataSource: coreHttp.CompositeMapper = { + serializedName: "TeradataSource", + type: { + name: "Composite", + className: "TeradataSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "TeradataPartitionSettings" + } + } + } + } +}; + +export const CassandraSource: coreHttp.CompositeMapper = { + serializedName: "CassandraSource", + type: { + name: "Composite", + className: "CassandraSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + consistencyLevel: { + serializedName: "consistencyLevel", + type: { + name: "String" + } + } + } + } +}; + +export const AmazonMWSSource: coreHttp.CompositeMapper = { + serializedName: "AmazonMWSSource", + type: { + name: "Composite", + className: "AmazonMWSSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + 
type: { + name: "any" + } + } + } + } +}; + +export const AzurePostgreSqlSource: coreHttp.CompositeMapper = { + serializedName: "AzurePostgreSqlSource", + type: { + name: "Composite", + className: "AzurePostgreSqlSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ConcurSource: coreHttp.CompositeMapper = { + serializedName: "ConcurSource", + type: { + name: "Composite", + className: "ConcurSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const CouchbaseSource: coreHttp.CompositeMapper = { + serializedName: "CouchbaseSource", + type: { + name: "Composite", + className: "CouchbaseSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const DrillSource: coreHttp.CompositeMapper = { + serializedName: "DrillSource", + type: { + name: "Composite", + className: "DrillSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const EloquaSource: coreHttp.CompositeMapper = { + serializedName: "EloquaSource", + type: { + name: "Composite", + className: "EloquaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleBigQuerySource: coreHttp.CompositeMapper = { + serializedName: "GoogleBigQuerySource", + type: { + name: "Composite", + className: "GoogleBigQuerySource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const GreenplumSource: coreHttp.CompositeMapper = { + serializedName: "GreenplumSource", + type: { + name: "Composite", + className: "GreenplumSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const HBaseSource: coreHttp.CompositeMapper = { + serializedName: "HBaseSource", + type: { + name: "Composite", + className: "HBaseSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + 
modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const HiveSource: coreHttp.CompositeMapper = { + serializedName: "HiveSource", + type: { + name: "Composite", + className: "HiveSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const HubspotSource: coreHttp.CompositeMapper = { + serializedName: "HubspotSource", + type: { + name: "Composite", + className: "HubspotSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ImpalaSource: coreHttp.CompositeMapper = { + serializedName: "ImpalaSource", + type: { + name: "Composite", + className: "ImpalaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const JiraSource: coreHttp.CompositeMapper = { + serializedName: "JiraSource", + type: { + name: "Composite", + className: "JiraSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MagentoSource: coreHttp.CompositeMapper = { + serializedName: "MagentoSource", + type: { + name: "Composite", + className: "MagentoSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MariaDBSource: coreHttp.CompositeMapper = { + serializedName: "MariaDBSource", + type: { + name: "Composite", + className: "MariaDBSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const AzureMariaDBSource: coreHttp.CompositeMapper = { + serializedName: "AzureMariaDBSource", + type: { + name: "Composite", + className: "AzureMariaDBSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const MarketoSource: coreHttp.CompositeMapper = { + serializedName: "MarketoSource", + type: { + name: "Composite", + className: "MarketoSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + 
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const PaypalSource: coreHttp.CompositeMapper = { + serializedName: "PaypalSource", + type: { + name: "Composite", + className: "PaypalSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const PhoenixSource: coreHttp.CompositeMapper = { + serializedName: "PhoenixSource", + type: { + name: "Composite", + className: "PhoenixSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const PrestoSource: coreHttp.CompositeMapper = { + serializedName: "PrestoSource", + type: { + name: "Composite", + className: "PrestoSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const QuickBooksSource: coreHttp.CompositeMapper = { + serializedName: "QuickBooksSource", + type: { + name: "Composite", + className: "QuickBooksSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ServiceNowSource: coreHttp.CompositeMapper = { + serializedName: "ServiceNowSource", + type: { + name: "Composite", + className: "ServiceNowSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ShopifySource: coreHttp.CompositeMapper = { + serializedName: "ShopifySource", + type: { + name: "Composite", + className: "ShopifySource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SparkSource: coreHttp.CompositeMapper = { + serializedName: "SparkSource", + type: { + name: "Composite", + className: "SparkSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SquareSource: coreHttp.CompositeMapper = { + serializedName: "SquareSource", + type: { + name: "Composite", + className: "SquareSource", + uberParent: 
"CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const XeroSource: coreHttp.CompositeMapper = { + serializedName: "XeroSource", + type: { + name: "Composite", + className: "XeroSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ZohoSource: coreHttp.CompositeMapper = { + serializedName: "ZohoSource", + type: { + name: "Composite", + className: "ZohoSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const NetezzaSource: coreHttp.CompositeMapper = { + serializedName: "NetezzaSource", + type: { + name: "Composite", + className: "NetezzaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + partitionOption: { + serializedName: "partitionOption", + type: { + name: "String" + } + }, + partitionSettings: { + serializedName: "partitionSettings", + type: { + name: "Composite", + className: "NetezzaPartitionSettings" + } + } + } + } +}; + +export const VerticaSource: coreHttp.CompositeMapper = { + serializedName: "VerticaSource", + type: { + name: "Composite", + className: "VerticaSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const SalesforceMarketingCloudSource: coreHttp.CompositeMapper = { + serializedName: "SalesforceMarketingCloudSource", + type: { + name: "Composite", + className: "SalesforceMarketingCloudSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const ResponsysSource: coreHttp.CompositeMapper = { + serializedName: "ResponsysSource", + type: { + name: "Composite", + className: "ResponsysSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const DynamicsAXSource: coreHttp.CompositeMapper = { + serializedName: "DynamicsAXSource", + type: { + name: "Composite", + className: "DynamicsAXSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: 
CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const OracleServiceCloudSource: coreHttp.CompositeMapper = { + serializedName: "OracleServiceCloudSource", + type: { + name: "Composite", + className: "OracleServiceCloudSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const GoogleAdWordsSource: coreHttp.CompositeMapper = { + serializedName: "GoogleAdWordsSource", + type: { + name: "Composite", + className: "GoogleAdWordsSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + } + } + } +}; + +export const AmazonRedshiftSource: coreHttp.CompositeMapper = { + serializedName: "AmazonRedshiftSource", + type: { + name: "Composite", + className: "AmazonRedshiftSource", + uberParent: "CopySource", + additionalProperties: { type: { name: "Object" } }, + polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator, + modelProperties: { + ...TabularSource.type.modelProperties, + query: { + serializedName: "query", + type: { + name: "any" + } + }, + redshiftUnloadSettings: { + serializedName: "redshiftUnloadSettings", + type: { + name: "Composite", + className: "RedshiftUnloadSettings" + } + } + } + } +}; + +export const TumblingWindowTriggerDependencyReference: coreHttp.CompositeMapper = { + serializedName: "TumblingWindowTriggerDependencyReference", + type: { + name: "Composite", + className: "TumblingWindowTriggerDependencyReference", + uberParent: "DependencyReference", + polymorphicDiscriminator: DependencyReference.type.polymorphicDiscriminator, + modelProperties: { + ...TriggerDependencyReference.type.modelProperties, + offset: { + constraints: { + Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + MaxLength: 15, + MinLength: 8 + }, + serializedName: "offset", + type: { + name: "String" + } + }, + size: { + constraints: { + Pattern: new RegExp("((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))"), + MaxLength: 15, + MinLength: 8 + }, + serializedName: "size", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugSessionCreateDataFlowDebugSessionHeaders: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugSessionCreateDataFlowDebugSessionHeaders", + modelProperties: { + location: { + serializedName: "location", + type: { + name: "String" + } + } + } + } +}; + +export const DataFlowDebugSessionExecuteCommandHeaders: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "DataFlowDebugSessionExecuteCommandHeaders", + modelProperties: { + location: { + serializedName: "location", + type: { + name: "String" + } + } + } + } +}; + +export let discriminators = { + LinkedService: LinkedService, + Dataset: Dataset, + Activity: Activity, + Trigger: Trigger, + DataFlow: DataFlow, + IntegrationRuntime: IntegrationRuntime, + SecretBase: SecretBase, + DatasetLocation: DatasetLocation, + DatasetStorageFormat: DatasetStorageFormat, + DatasetCompression: DatasetCompression, + 
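// How this discriminator map is consumed (an illustrative sketch, not part of the
// generated file; `payload` and the inlined lookup below are assumptions): core-http
// keys each "Parent.discriminatorValue" entry to a concrete mapper so the serializer
// can pick the right className from a payload's `type` property, e.g.
//
//   const payload = { type: "SqlSource", sqlReaderQuery: "select 1" };
//   const mapper = discriminators[`CopySource.${payload.type}`]; // -> SqlSource mapper
//   const model = serializer.deserialize(mapper, payload, "copySource");
//
// Bare keys such as LinkedService resolve payloads typed only as the base class.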
WebLinkedServiceTypeProperties: WebLinkedServiceTypeProperties, + StoreReadSettings: StoreReadSettings, + StoreWriteSettings: StoreWriteSettings, + FormatReadSettings: FormatReadSettings, + FormatWriteSettings: FormatWriteSettings, + CopySource: CopySource, + CopySink: CopySink, + CopyTranslator: CopyTranslator, + DependencyReference: DependencyReference, + "CustomSetupBase.undefined": CustomSetupBase, + LinkedIntegrationRuntimeType: LinkedIntegrationRuntimeType, + "LinkedService.AzureStorage": AzureStorageLinkedService, + "LinkedService.AzureBlobStorage": AzureBlobStorageLinkedService, + "LinkedService.AzureTableStorage": AzureTableStorageLinkedService, + "LinkedService.AzureSqlDW": AzureSqlDWLinkedService, + "LinkedService.SqlServer": SqlServerLinkedService, + "LinkedService.AzureSqlDatabase": AzureSqlDatabaseLinkedService, + "LinkedService.AzureSqlMI": AzureSqlMILinkedService, + "LinkedService.AzureBatch": AzureBatchLinkedService, + "LinkedService.AzureKeyVault": AzureKeyVaultLinkedService, + "LinkedService.CosmosDb": CosmosDbLinkedService, + "LinkedService.Dynamics": DynamicsLinkedService, + "LinkedService.DynamicsCrm": DynamicsCrmLinkedService, + "LinkedService.CommonDataServiceForApps": CommonDataServiceForAppsLinkedService, + "LinkedService.HDInsight": HDInsightLinkedService, + "LinkedService.FileServer": FileServerLinkedService, + "LinkedService.AzureFileStorage": AzureFileStorageLinkedService, + "LinkedService.GoogleCloudStorage": GoogleCloudStorageLinkedService, + "LinkedService.Oracle": OracleLinkedService, + "LinkedService.AzureMySql": AzureMySqlLinkedService, + "LinkedService.MySql": MySqlLinkedService, + "LinkedService.PostgreSql": PostgreSqlLinkedService, + "LinkedService.Sybase": SybaseLinkedService, + "LinkedService.Db2": Db2LinkedService, + "LinkedService.Teradata": TeradataLinkedService, + "LinkedService.AzureML": AzureMLLinkedService, + "LinkedService.AzureMLService": AzureMLServiceLinkedService, + "LinkedService.Odbc": OdbcLinkedService, + "LinkedService.Informix": InformixLinkedService, + "LinkedService.MicrosoftAccess": MicrosoftAccessLinkedService, + "LinkedService.Hdfs": HdfsLinkedService, + "LinkedService.OData": ODataLinkedService, + "LinkedService.Web": WebLinkedService, + "LinkedService.Cassandra": CassandraLinkedService, + "LinkedService.MongoDb": MongoDbLinkedService, + "LinkedService.MongoDbV2": MongoDbV2LinkedService, + "LinkedService.CosmosDbMongoDbApi": CosmosDbMongoDbApiLinkedService, + "LinkedService.AzureDataLakeStore": AzureDataLakeStoreLinkedService, + "LinkedService.AzureBlobFS": AzureBlobFSLinkedService, + "LinkedService.Office365": Office365LinkedService, + "LinkedService.Salesforce": SalesforceLinkedService, + "LinkedService.SalesforceServiceCloud": SalesforceServiceCloudLinkedService, + "LinkedService.SapCloudForCustomer": SapCloudForCustomerLinkedService, + "LinkedService.SapEcc": SapEccLinkedService, + "LinkedService.SapOpenHub": SapOpenHubLinkedService, + "LinkedService.RestService": RestServiceLinkedService, + "LinkedService.AmazonS3": AmazonS3LinkedService, + "LinkedService.AmazonRedshift": AmazonRedshiftLinkedService, + "LinkedService.CustomDataSource": CustomDataSourceLinkedService, + "LinkedService.AzureSearch": AzureSearchLinkedService, + "LinkedService.HttpServer": HttpLinkedService, + "LinkedService.FtpServer": FtpServerLinkedService, + "LinkedService.Sftp": SftpServerLinkedService, + "LinkedService.SapBW": SapBWLinkedService, + "LinkedService.SapHana": SapHanaLinkedService, + "LinkedService.AmazonMWS": AmazonMWSLinkedService, + 
"LinkedService.AzurePostgreSql": AzurePostgreSqlLinkedService, + "LinkedService.Concur": ConcurLinkedService, + "LinkedService.Couchbase": CouchbaseLinkedService, + "LinkedService.Drill": DrillLinkedService, + "LinkedService.Eloqua": EloquaLinkedService, + "LinkedService.GoogleBigQuery": GoogleBigQueryLinkedService, + "LinkedService.Greenplum": GreenplumLinkedService, + "LinkedService.HBase": HBaseLinkedService, + "LinkedService.Hive": HiveLinkedService, + "LinkedService.Hubspot": HubspotLinkedService, + "LinkedService.Impala": ImpalaLinkedService, + "LinkedService.Jira": JiraLinkedService, + "LinkedService.Magento": MagentoLinkedService, + "LinkedService.MariaDB": MariaDBLinkedService, + "LinkedService.AzureMariaDB": AzureMariaDBLinkedService, + "LinkedService.Marketo": MarketoLinkedService, + "LinkedService.Paypal": PaypalLinkedService, + "LinkedService.Phoenix": PhoenixLinkedService, + "LinkedService.Presto": PrestoLinkedService, + "LinkedService.QuickBooks": QuickBooksLinkedService, + "LinkedService.ServiceNow": ServiceNowLinkedService, + "LinkedService.Shopify": ShopifyLinkedService, + "LinkedService.Spark": SparkLinkedService, + "LinkedService.Square": SquareLinkedService, + "LinkedService.Xero": XeroLinkedService, + "LinkedService.Zoho": ZohoLinkedService, + "LinkedService.Vertica": VerticaLinkedService, + "LinkedService.Netezza": NetezzaLinkedService, + "LinkedService.SalesforceMarketingCloud": SalesforceMarketingCloudLinkedService, + "LinkedService.HDInsightOnDemand": HDInsightOnDemandLinkedService, + "LinkedService.AzureDataLakeAnalytics": AzureDataLakeAnalyticsLinkedService, + "LinkedService.AzureDatabricks": AzureDatabricksLinkedService, + "LinkedService.Responsys": ResponsysLinkedService, + "LinkedService.DynamicsAX": DynamicsAXLinkedService, + "LinkedService.OracleServiceCloud": OracleServiceCloudLinkedService, + "LinkedService.GoogleAdWords": GoogleAdWordsLinkedService, + "LinkedService.SapTable": SapTableLinkedService, + "LinkedService.AzureDataExplorer": AzureDataExplorerLinkedService, + "LinkedService.AzureFunction": AzureFunctionLinkedService, + "Dataset.Avro": AvroDataset, + "Dataset.Parquet": ParquetDataset, + "Dataset.DelimitedText": DelimitedTextDataset, + "Dataset.Json": JsonDataset, + "Dataset.Orc": OrcDataset, + "Dataset.Binary": BinaryDataset, + "Dataset.AzureTable": AzureTableDataset, + "Dataset.AzureSqlTable": AzureSqlTableDataset, + "Dataset.AzureSqlMITable": AzureSqlMITableDataset, + "Dataset.AzureSqlDWTable": AzureSqlDWTableDataset, + "Dataset.CassandraTable": CassandraTableDataset, + "Dataset.CustomDataset": CustomDataset, + "Dataset.CosmosDbSqlApiCollection": CosmosDbSqlApiCollectionDataset, + "Dataset.DocumentDbCollection": DocumentDbCollectionDataset, + "Dataset.DynamicsEntity": DynamicsEntityDataset, + "Dataset.DynamicsCrmEntity": DynamicsCrmEntityDataset, + "Dataset.CommonDataServiceForAppsEntity": CommonDataServiceForAppsEntityDataset, + "Dataset.Office365Table": Office365Dataset, + "Dataset.MongoDbCollection": MongoDbCollectionDataset, + "Dataset.MongoDbV2Collection": MongoDbV2CollectionDataset, + "Dataset.CosmosDbMongoDbApiCollection": CosmosDbMongoDbApiCollectionDataset, + "Dataset.ODataResource": ODataResourceDataset, + "Dataset.OracleTable": OracleTableDataset, + "Dataset.TeradataTable": TeradataTableDataset, + "Dataset.AzureMySqlTable": AzureMySqlTableDataset, + "Dataset.AmazonRedshiftTable": AmazonRedshiftTableDataset, + "Dataset.Db2Table": Db2TableDataset, + "Dataset.RelationalTable": RelationalTableDataset, + "Dataset.InformixTable": 
InformixTableDataset, + "Dataset.OdbcTable": OdbcTableDataset, + "Dataset.MySqlTable": MySqlTableDataset, + "Dataset.PostgreSqlTable": PostgreSqlTableDataset, + "Dataset.MicrosoftAccessTable": MicrosoftAccessTableDataset, + "Dataset.SalesforceObject": SalesforceObjectDataset, + "Dataset.SalesforceServiceCloudObject": SalesforceServiceCloudObjectDataset, + "Dataset.SybaseTable": SybaseTableDataset, + "Dataset.SapBwCube": SapBwCubeDataset, + "Dataset.SapCloudForCustomerResource": SapCloudForCustomerResourceDataset, + "Dataset.SapEccResource": SapEccResourceDataset, + "Dataset.SapHanaTable": SapHanaTableDataset, + "Dataset.SapOpenHubTable": SapOpenHubTableDataset, + "Dataset.SqlServerTable": SqlServerTableDataset, + "Dataset.RestResource": RestResourceDataset, + "Dataset.SapTableResource": SapTableResourceDataset, + "Dataset.WebTable": WebTableDataset, + "Dataset.AzureSearchIndex": AzureSearchIndexDataset, + "Dataset.AmazonMWSObject": AmazonMWSObjectDataset, + "Dataset.AzurePostgreSqlTable": AzurePostgreSqlTableDataset, + "Dataset.ConcurObject": ConcurObjectDataset, + "Dataset.CouchbaseTable": CouchbaseTableDataset, + "Dataset.DrillTable": DrillTableDataset, + "Dataset.EloquaObject": EloquaObjectDataset, + "Dataset.GoogleBigQueryObject": GoogleBigQueryObjectDataset, + "Dataset.GreenplumTable": GreenplumTableDataset, + "Dataset.HBaseObject": HBaseObjectDataset, + "Dataset.HiveObject": HiveObjectDataset, + "Dataset.HubspotObject": HubspotObjectDataset, + "Dataset.ImpalaObject": ImpalaObjectDataset, + "Dataset.JiraObject": JiraObjectDataset, + "Dataset.MagentoObject": MagentoObjectDataset, + "Dataset.MariaDBTable": MariaDBTableDataset, + "Dataset.AzureMariaDBTable": AzureMariaDBTableDataset, + "Dataset.MarketoObject": MarketoObjectDataset, + "Dataset.PaypalObject": PaypalObjectDataset, + "Dataset.PhoenixObject": PhoenixObjectDataset, + "Dataset.PrestoObject": PrestoObjectDataset, + "Dataset.QuickBooksObject": QuickBooksObjectDataset, + "Dataset.ServiceNowObject": ServiceNowObjectDataset, + "Dataset.ShopifyObject": ShopifyObjectDataset, + "Dataset.SparkObject": SparkObjectDataset, + "Dataset.SquareObject": SquareObjectDataset, + "Dataset.XeroObject": XeroObjectDataset, + "Dataset.ZohoObject": ZohoObjectDataset, + "Dataset.NetezzaTable": NetezzaTableDataset, + "Dataset.VerticaTable": VerticaTableDataset, + "Dataset.SalesforceMarketingCloudObject": SalesforceMarketingCloudObjectDataset, + "Dataset.ResponsysObject": ResponsysObjectDataset, + "Dataset.DynamicsAXResource": DynamicsAXResourceDataset, + "Dataset.OracleServiceCloudObject": OracleServiceCloudObjectDataset, + "Dataset.AzureDataExplorerTable": AzureDataExplorerTableDataset, + "Dataset.GoogleAdWordsObject": GoogleAdWordsObjectDataset, + "Activity.Container": ControlActivity, + "Activity.Execution": ExecutionActivity, + "Activity.ExecutePipeline": ExecutePipelineActivity, + "Activity.IfCondition": IfConditionActivity, + "Activity.Switch": SwitchActivity, + "Activity.ForEach": ForEachActivity, + "Activity.Wait": WaitActivity, + "Activity.Until": UntilActivity, + "Activity.Validation": ValidationActivity, + "Activity.Filter": FilterActivity, + "Activity.SetVariable": SetVariableActivity, + "Activity.AppendVariable": AppendVariableActivity, + "Activity.WebHook": WebHookActivity, + "Activity.SqlPoolStoredProcedure": SqlPoolStoredProcedureActivity, + "Trigger.RerunTumblingWindowTrigger": RerunTumblingWindowTrigger, + "Trigger.MultiplePipelineTrigger": MultiplePipelineTrigger, + "Trigger.TumblingWindowTrigger": TumblingWindowTrigger, + 
"Trigger.ChainingTrigger": ChainingTrigger, + "DataFlow.MappingDataFlow": MappingDataFlow, + "IntegrationRuntime.Managed": ManagedIntegrationRuntime, + "IntegrationRuntime.SelfHosted": SelfHostedIntegrationRuntime, + "SecretBase.SecureString": SecureString, + "SecretBase.AzureKeyVaultSecret": AzureKeyVaultSecretReference, + "DatasetLocation.AzureBlobStorageLocation": AzureBlobStorageLocation, + "DatasetLocation.AzureBlobFSLocation": AzureBlobFSLocation, + "DatasetLocation.AzureDataLakeStoreLocation": AzureDataLakeStoreLocation, + "DatasetLocation.AmazonS3Location": AmazonS3Location, + "DatasetLocation.FileServerLocation": FileServerLocation, + "DatasetLocation.AzureFileStorageLocation": AzureFileStorageLocation, + "DatasetLocation.GoogleCloudStorageLocation": GoogleCloudStorageLocation, + "DatasetLocation.FtpServerLocation": FtpServerLocation, + "DatasetLocation.SftpLocation": SftpLocation, + "DatasetLocation.HttpServerLocation": HttpServerLocation, + "DatasetLocation.HdfsLocation": HdfsLocation, + "DatasetStorageFormat.TextFormat": TextFormat, + "DatasetStorageFormat.JsonFormat": JsonFormat, + "DatasetStorageFormat.AvroFormat": AvroFormat, + "DatasetStorageFormat.OrcFormat": OrcFormat, + "DatasetStorageFormat.ParquetFormat": ParquetFormat, + "DatasetCompression.BZip2": DatasetBZip2Compression, + "DatasetCompression.GZip": DatasetGZipCompression, + "DatasetCompression.Deflate": DatasetDeflateCompression, + "DatasetCompression.ZipDeflate": DatasetZipDeflateCompression, + "WebLinkedServiceTypeProperties.Anonymous": WebAnonymousAuthentication, + "WebLinkedServiceTypeProperties.Basic": WebBasicAuthentication, + "WebLinkedServiceTypeProperties.ClientCertificate": WebClientCertificateAuthentication, + "StoreReadSettings.AzureBlobStorageReadSettings": AzureBlobStorageReadSettings, + "StoreReadSettings.AzureBlobFSReadSettings": AzureBlobFSReadSettings, + "StoreReadSettings.AzureDataLakeStoreReadSettings": AzureDataLakeStoreReadSettings, + "StoreReadSettings.AmazonS3ReadSettings": AmazonS3ReadSettings, + "StoreReadSettings.FileServerReadSettings": FileServerReadSettings, + "StoreReadSettings.AzureFileStorageReadSettings": AzureFileStorageReadSettings, + "StoreReadSettings.GoogleCloudStorageReadSettings": GoogleCloudStorageReadSettings, + "StoreReadSettings.FtpReadSettings": FtpReadSettings, + "StoreReadSettings.SftpReadSettings": SftpReadSettings, + "StoreReadSettings.HttpReadSettings": HttpReadSettings, + "StoreReadSettings.HdfsReadSettings": HdfsReadSettings, + "StoreWriteSettings.SftpWriteSettings": SftpWriteSettings, + "StoreWriteSettings.AzureBlobStorageWriteSettings": AzureBlobStorageWriteSettings, + "StoreWriteSettings.AzureBlobFSWriteSettings": AzureBlobFSWriteSettings, + "StoreWriteSettings.AzureDataLakeStoreWriteSettings": AzureDataLakeStoreWriteSettings, + "StoreWriteSettings.FileServerWriteSettings": FileServerWriteSettings, + "FormatReadSettings.DelimitedTextReadSettings": DelimitedTextReadSettings, + "FormatWriteSettings.AvroWriteSettings": AvroWriteSettings, + "FormatWriteSettings.DelimitedTextWriteSettings": DelimitedTextWriteSettings, + "FormatWriteSettings.JsonWriteSettings": JsonWriteSettings, + "CopySource.AvroSource": AvroSource, + "CopySource.ParquetSource": ParquetSource, + "CopySource.DelimitedTextSource": DelimitedTextSource, + "CopySource.JsonSource": JsonSource, + "CopySource.OrcSource": OrcSource, + "CopySource.BinarySource": BinarySource, + "CopySource.TabularSource": TabularSource, + "CopySource.BlobSource": BlobSource, + "CopySource.DocumentDbCollectionSource": 
DocumentDbCollectionSource, + "CopySource.CosmosDbSqlApiSource": CosmosDbSqlApiSource, + "CopySource.DynamicsSource": DynamicsSource, + "CopySource.DynamicsCrmSource": DynamicsCrmSource, + "CopySource.CommonDataServiceForAppsSource": CommonDataServiceForAppsSource, + "CopySource.RelationalSource": RelationalSource, + "CopySource.MicrosoftAccessSource": MicrosoftAccessSource, + "CopySource.ODataSource": ODataSource, + "CopySource.SalesforceServiceCloudSource": SalesforceServiceCloudSource, + "CopySource.RestSource": RestSource, + "CopySource.FileSystemSource": FileSystemSource, + "CopySource.HdfsSource": HdfsSource, + "CopySource.AzureDataExplorerSource": AzureDataExplorerSource, + "CopySource.OracleSource": OracleSource, + "CopySource.WebSource": WebSource, + "CopySource.MongoDbSource": MongoDbSource, + "CopySource.MongoDbV2Source": MongoDbV2Source, + "CopySource.CosmosDbMongoDbApiSource": CosmosDbMongoDbApiSource, + "CopySource.Office365Source": Office365Source, + "CopySource.AzureDataLakeStoreSource": AzureDataLakeStoreSource, + "CopySource.AzureBlobFSSource": AzureBlobFSSource, + "CopySource.HttpSource": HttpSource, + "CopySink.DelimitedTextSink": DelimitedTextSink, + "CopySink.JsonSink": JsonSink, + "CopySink.OrcSink": OrcSink, + "CopySink.AzurePostgreSqlSink": AzurePostgreSqlSink, + "CopySink.AzureMySqlSink": AzureMySqlSink, + "CopySink.SapCloudForCustomerSink": SapCloudForCustomerSink, + "CopySink.AzureQueueSink": AzureQueueSink, + "CopySink.AzureTableSink": AzureTableSink, + "CopySink.AvroSink": AvroSink, + "CopySink.ParquetSink": ParquetSink, + "CopySink.BinarySink": BinarySink, + "CopySink.BlobSink": BlobSink, + "CopySink.FileSystemSink": FileSystemSink, + "CopySink.DocumentDbCollectionSink": DocumentDbCollectionSink, + "CopySink.CosmosDbSqlApiSink": CosmosDbSqlApiSink, + "CopySink.SqlSink": SqlSink, + "CopySink.SqlServerSink": SqlServerSink, + "CopySink.AzureSqlSink": AzureSqlSink, + "CopySink.SqlMISink": SqlMISink, + "CopySink.SqlDWSink": SqlDWSink, + "CopySink.OracleSink": OracleSink, + "CopySink.AzureDataLakeStoreSink": AzureDataLakeStoreSink, + "CopySink.AzureBlobFSSink": AzureBlobFSSink, + "CopySink.AzureSearchIndexSink": AzureSearchIndexSink, + "CopySink.OdbcSink": OdbcSink, + "CopySink.InformixSink": InformixSink, + "CopySink.MicrosoftAccessSink": MicrosoftAccessSink, + "CopySink.DynamicsSink": DynamicsSink, + "CopySink.DynamicsCrmSink": DynamicsCrmSink, + "CopySink.CommonDataServiceForAppsSink": CommonDataServiceForAppsSink, + "CopySink.AzureDataExplorerSink": AzureDataExplorerSink, + "CopySink.SalesforceSink": SalesforceSink, + "CopySink.SalesforceServiceCloudSink": SalesforceServiceCloudSink, + "CopySink.CosmosDbMongoDbApiSink": CosmosDbMongoDbApiSink, + "CopyTranslator.TabularTranslator": TabularTranslator, + "DependencyReference.TriggerDependencyReference": TriggerDependencyReference, + "DependencyReference.SelfDependencyTumblingWindowTriggerReference": SelfDependencyTumblingWindowTriggerReference, + "LinkedIntegrationRuntimeType.Key": LinkedIntegrationRuntimeKeyAuthorization, + "LinkedIntegrationRuntimeType.RBAC": LinkedIntegrationRuntimeRbacAuthorization, + "Activity.Copy": CopyActivity, + "Activity.HDInsightHive": HDInsightHiveActivity, + "Activity.HDInsightPig": HDInsightPigActivity, + "Activity.HDInsightMapReduce": HDInsightMapReduceActivity, + "Activity.HDInsightStreaming": HDInsightStreamingActivity, + "Activity.HDInsightSpark": HDInsightSparkActivity, + "Activity.ExecuteSSISPackage": ExecuteSsisPackageActivity, + "Activity.Custom": CustomActivity, + 
"Activity.SqlServerStoredProcedure": SqlServerStoredProcedureActivity, + "Activity.Delete": DeleteActivity, + "Activity.AzureDataExplorerCommand": AzureDataExplorerCommandActivity, + "Activity.Lookup": LookupActivity, + "Activity.WebActivity": WebActivity, + "Activity.GetMetadata": GetMetadataActivity, + "Activity.AzureMLBatchExecution": AzureMLBatchExecutionActivity, + "Activity.AzureMLUpdateResource": AzureMLUpdateResourceActivity, + "Activity.AzureMLExecutePipeline": AzureMLExecutePipelineActivity, + "Activity.DataLakeAnalyticsU-SQL": DataLakeAnalyticsUsqlActivity, + "Activity.DatabricksNotebook": DatabricksNotebookActivity, + "Activity.DatabricksSparkJar": DatabricksSparkJarActivity, + "Activity.DatabricksSparkPython": DatabricksSparkPythonActivity, + "Activity.AzureFunctionActivity": AzureFunctionActivity, + "Activity.ExecuteDataFlow": ExecuteDataFlowActivity, + "Activity.SynapseNotebook": SynapseNotebookActivity, + "Activity.SparkJob": SynapseSparkJobDefinitionActivity, + "Trigger.ScheduleTrigger": ScheduleTrigger, + "Trigger.BlobTrigger": BlobTrigger, + "Trigger.BlobEventsTrigger": BlobEventsTrigger, + "CopySource.AzureTableSource": AzureTableSource, + "CopySource.InformixSource": InformixSource, + "CopySource.Db2Source": Db2Source, + "CopySource.OdbcSource": OdbcSource, + "CopySource.MySqlSource": MySqlSource, + "CopySource.PostgreSqlSource": PostgreSqlSource, + "CopySource.SybaseSource": SybaseSource, + "CopySource.SapBwSource": SapBwSource, + "CopySource.SalesforceSource": SalesforceSource, + "CopySource.SapCloudForCustomerSource": SapCloudForCustomerSource, + "CopySource.SapEccSource": SapEccSource, + "CopySource.SapHanaSource": SapHanaSource, + "CopySource.SapOpenHubSource": SapOpenHubSource, + "CopySource.SapTableSource": SapTableSource, + "CopySource.SqlSource": SqlSource, + "CopySource.SqlServerSource": SqlServerSource, + "CopySource.AzureSqlSource": AzureSqlSource, + "CopySource.SqlMISource": SqlMISource, + "CopySource.SqlDWSource": SqlDWSource, + "CopySource.AzureMySqlSource": AzureMySqlSource, + "CopySource.TeradataSource": TeradataSource, + "CopySource.CassandraSource": CassandraSource, + "CopySource.AmazonMWSSource": AmazonMWSSource, + "CopySource.AzurePostgreSqlSource": AzurePostgreSqlSource, + "CopySource.ConcurSource": ConcurSource, + "CopySource.CouchbaseSource": CouchbaseSource, + "CopySource.DrillSource": DrillSource, + "CopySource.EloquaSource": EloquaSource, + "CopySource.GoogleBigQuerySource": GoogleBigQuerySource, + "CopySource.GreenplumSource": GreenplumSource, + "CopySource.HBaseSource": HBaseSource, + "CopySource.HiveSource": HiveSource, + "CopySource.HubspotSource": HubspotSource, + "CopySource.ImpalaSource": ImpalaSource, + "CopySource.JiraSource": JiraSource, + "CopySource.MagentoSource": MagentoSource, + "CopySource.MariaDBSource": MariaDBSource, + "CopySource.AzureMariaDBSource": AzureMariaDBSource, + "CopySource.MarketoSource": MarketoSource, + "CopySource.PaypalSource": PaypalSource, + "CopySource.PhoenixSource": PhoenixSource, + "CopySource.PrestoSource": PrestoSource, + "CopySource.QuickBooksSource": QuickBooksSource, + "CopySource.ServiceNowSource": ServiceNowSource, + "CopySource.ShopifySource": ShopifySource, + "CopySource.SparkSource": SparkSource, + "CopySource.SquareSource": SquareSource, + "CopySource.XeroSource": XeroSource, + "CopySource.ZohoSource": ZohoSource, + "CopySource.NetezzaSource": NetezzaSource, + "CopySource.VerticaSource": VerticaSource, + "CopySource.SalesforceMarketingCloudSource": SalesforceMarketingCloudSource, + 
"CopySource.ResponsysSource": ResponsysSource, + "CopySource.DynamicsAXSource": DynamicsAXSource, + "CopySource.OracleServiceCloudSource": OracleServiceCloudSource, + "CopySource.GoogleAdWordsSource": GoogleAdWordsSource, + "CopySource.AmazonRedshiftSource": AmazonRedshiftSource, + "DependencyReference.TumblingWindowTriggerDependencyReference": TumblingWindowTriggerDependencyReference +}; diff --git a/sdk/synapse/synapse-artifacts/src/models/parameters.ts b/sdk/synapse/synapse-artifacts/src/models/parameters.ts new file mode 100644 index 000000000000..592ee559cd05 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/models/parameters.ts @@ -0,0 +1,402 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { + OperationParameter, + OperationURLParameter, + OperationQueryParameter +} from "@azure/core-http"; +import { + LinkedServiceResource as LinkedServiceResourceMapper, + ArtifactRenameRequest as ArtifactRenameRequestMapper, + DatasetResource as DatasetResourceMapper, + PipelineResource as PipelineResourceMapper, + RunFilterParameters as RunFilterParametersMapper, + TriggerResource as TriggerResourceMapper, + DataFlowResource as DataFlowResourceMapper, + CreateDataFlowDebugSessionRequest as CreateDataFlowDebugSessionRequestMapper, + DataFlowDebugPackage as DataFlowDebugPackageMapper, + DeleteDataFlowDebugSessionRequest as DeleteDataFlowDebugSessionRequestMapper, + DataFlowDebugCommandRequest as DataFlowDebugCommandRequestMapper, + SqlScriptResource as SqlScriptResourceMapper, + SparkJobDefinitionResource as SparkJobDefinitionResourceMapper, + NotebookResource as NotebookResourceMapper, + GitHubAccessTokenRequest as GitHubAccessTokenRequestMapper +} from "../models/mappers"; + +export const accept: OperationParameter = { + parameterPath: "accept", + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; + +export const endpoint: OperationURLParameter = { + parameterPath: "endpoint", + mapper: { + serializedName: "endpoint", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const apiVersion: OperationQueryParameter = { + parameterPath: "apiVersion", + mapper: { + defaultValue: "2019-06-01-preview", + isConstant: true, + serializedName: "api-version", + type: { + name: "String" + } + } +}; + +export const contentType: OperationParameter = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; + +export const linkedService: OperationParameter = { + parameterPath: "linkedService", + mapper: LinkedServiceResourceMapper +}; + +export const linkedServiceName: OperationURLParameter = { + parameterPath: "linkedServiceName", + mapper: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "linkedServiceName", + required: true, + type: { + name: "String" + } + } +}; + +export const ifMatch: OperationParameter = { + parameterPath: ["options", "ifMatch"], + mapper: { + serializedName: "If-Match", + type: { + name: "String" + } + } +}; + +export const ifNoneMatch: OperationParameter = { + parameterPath: ["options", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + type: { + name: "String" + } + } +}; + +export const request: OperationParameter = { + parameterPath: "request", + mapper: ArtifactRenameRequestMapper +}; + +export const 
nextLink: OperationURLParameter = { + parameterPath: "nextLink", + mapper: { + serializedName: "nextLink", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const dataset: OperationParameter = { + parameterPath: "dataset", + mapper: DatasetResourceMapper +}; + +export const datasetName: OperationURLParameter = { + parameterPath: "datasetName", + mapper: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "datasetName", + required: true, + type: { + name: "String" + } + } +}; + +export const pipeline: OperationParameter = { + parameterPath: "pipeline", + mapper: PipelineResourceMapper +}; + +export const pipelineName: OperationURLParameter = { + parameterPath: "pipelineName", + mapper: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "pipelineName", + required: true, + type: { + name: "String" + } + } +}; + +export const parameters: OperationParameter = { + parameterPath: ["options", "parameters"], + mapper: { + serializedName: "parameters", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + } +}; + +export const referencePipelineRunId: OperationQueryParameter = { + parameterPath: ["options", "referencePipelineRunId"], + mapper: { + serializedName: "referencePipelineRunId", + type: { + name: "String" + } + } +}; + +export const isRecovery: OperationQueryParameter = { + parameterPath: ["options", "isRecovery"], + mapper: { + serializedName: "isRecovery", + type: { + name: "Boolean" + } + } +}; + +export const startActivityName: OperationQueryParameter = { + parameterPath: ["options", "startActivityName"], + mapper: { + serializedName: "startActivityName", + type: { + name: "String" + } + } +}; + +export const filterParameters: OperationParameter = { + parameterPath: "filterParameters", + mapper: RunFilterParametersMapper +}; + +export const runId: OperationURLParameter = { + parameterPath: "runId", + mapper: { + serializedName: "runId", + required: true, + type: { + name: "String" + } + } +}; + +export const isRecursive: OperationQueryParameter = { + parameterPath: ["options", "isRecursive"], + mapper: { + serializedName: "isRecursive", + type: { + name: "Boolean" + } + } +}; + +export const trigger: OperationParameter = { + parameterPath: "trigger", + mapper: TriggerResourceMapper +}; + +export const triggerName: OperationURLParameter = { + parameterPath: "triggerName", + mapper: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "triggerName", + required: true, + type: { + name: "String" + } + } +}; + +export const dataFlow: OperationParameter = { + parameterPath: "dataFlow", + mapper: DataFlowResourceMapper +}; + +export const dataFlowName: OperationURLParameter = { + parameterPath: "dataFlowName", + mapper: { + constraints: { + Pattern: new RegExp("^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$"), + MaxLength: 260, + MinLength: 1 + }, + serializedName: "dataFlowName", + required: true, + type: { + name: "String" + } + } +}; + +export const request1: OperationParameter = { + parameterPath: "request", + mapper: CreateDataFlowDebugSessionRequestMapper +}; + +export const request2: OperationParameter = { + parameterPath: "request", + mapper: DataFlowDebugPackageMapper +}; + +export const request3: OperationParameter = { + parameterPath: "request", + mapper: DeleteDataFlowDebugSessionRequestMapper +}; + 
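// Illustrative sketch (not part of the generated file): request1 through request4 all
// share parameterPath "request" because different operations post different body types,
// so each body gets its own numbered constant bound to the matching mapper. An operation
// spec then wires these shared constants together; the object below loosely mirrors the
// deleteDataFlowDebugSessionOperationSpec defined later in operations/dataFlowDebugSession.ts.
const exampleSpecSketch = {
  path: "/deleteDataFlowDebugSession",
  httpMethod: "POST",
  requestBody: request3, // serialized via DeleteDataFlowDebugSessionRequestMapper
  queryParameters: [apiVersion], // appended as ?api-version=2019-06-01-preview
  urlParameters: [endpoint], // fills {endpoint} in the base URL; skipEncoding leaves it raw
  headerParameters: [accept, contentType]
};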
+export const request4: OperationParameter = { + parameterPath: "request", + mapper: DataFlowDebugCommandRequestMapper +}; + +export const sqlScript: OperationParameter = { + parameterPath: "sqlScript", + mapper: SqlScriptResourceMapper +}; + +export const sqlScriptName: OperationURLParameter = { + parameterPath: "sqlScriptName", + mapper: { + serializedName: "sqlScriptName", + required: true, + type: { + name: "String" + } + } +}; + +export const sparkJobDefinition: OperationParameter = { + parameterPath: "sparkJobDefinition", + mapper: SparkJobDefinitionResourceMapper +}; + +export const sparkJobDefinitionName: OperationURLParameter = { + parameterPath: "sparkJobDefinitionName", + mapper: { + serializedName: "sparkJobDefinitionName", + required: true, + type: { + name: "String" + } + } +}; + +export const sparkJobDefinitionAzureResource: OperationParameter = { + parameterPath: "sparkJobDefinitionAzureResource", + mapper: SparkJobDefinitionResourceMapper +}; + +export const notebook: OperationParameter = { + parameterPath: "notebook", + mapper: NotebookResourceMapper +}; + +export const notebookName: OperationURLParameter = { + parameterPath: "notebookName", + mapper: { + serializedName: "notebookName", + required: true, + type: { + name: "String" + } + } +}; + +export const sqlPoolName: OperationURLParameter = { + parameterPath: "sqlPoolName", + mapper: { + serializedName: "sqlPoolName", + required: true, + type: { + name: "String" + } + } +}; + +export const bigDataPoolName: OperationURLParameter = { + parameterPath: "bigDataPoolName", + mapper: { + serializedName: "bigDataPoolName", + required: true, + type: { + name: "String" + } + } +}; + +export const integrationRuntimeName: OperationURLParameter = { + parameterPath: "integrationRuntimeName", + mapper: { + serializedName: "integrationRuntimeName", + required: true, + type: { + name: "String" + } + } +}; + +export const gitHubAccessTokenRequest: OperationParameter = { + parameterPath: "gitHubAccessTokenRequest", + mapper: GitHubAccessTokenRequestMapper +}; + +export const clientRequestId: OperationParameter = { + parameterPath: ["options", "clientRequestId"], + mapper: { + serializedName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts new file mode 100644 index 000000000000..3d2f97d9e1df --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/bigDataPools.ts @@ -0,0 +1,117 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { BigDataPoolsListResponse, BigDataPoolsGetResponse } from "../models"; + +/** + * Class representing a BigDataPools. + */ +export class BigDataPools { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the BigDataPools class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists Big Data Pools. + * @param options The options parameters. 
+ */ + async list(options?: coreHttp.OperationOptions): Promise<BigDataPoolsListResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-list", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); + return result as BigDataPoolsListResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Gets a Big Data Pool. + * @param bigDataPoolName The Big Data Pool name. + * @param options The options parameters. + */ + async get( + bigDataPoolName: string, + options?: coreHttp.OperationOptions + ): Promise<BigDataPoolsGetResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + bigDataPoolName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); + return result as BigDataPoolsGetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const listOperationSpec: coreHttp.OperationSpec = { + path: "/bigDataPools", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BigDataPoolResourceInfoListResult + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getOperationSpec: coreHttp.OperationSpec = { + path: "/bigDataPools/{bigDataPoolName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BigDataPoolResourceInfo + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.bigDataPoolName], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts new file mode 100644 index 000000000000..0795b9e91ac3 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlow.ts @@ -0,0 +1,451 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + DataFlowResource, + DataFlowCreateOrUpdateDataFlowOptionalParams, + DataFlowCreateOrUpdateDataFlowResponse, + DataFlowGetDataFlowOptionalParams, + DataFlowGetDataFlowResponse, + ArtifactRenameRequest, + DataFlowGetDataFlowsByWorkspaceResponse, + DataFlowGetDataFlowsByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a DataFlow. 
+ */ +export class DataFlow { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the DataFlow class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists data flows. + * @param options The options parameters. + */ + public listDataFlowsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator<DataFlowResource> { + const iter = this.getDataFlowsByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getDataFlowsByWorkspacePagingPage(options); + } + }; + } + + private async *getDataFlowsByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator<DataFlowResource[]> { + let result = await this._getDataFlowsByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getDataFlowsByWorkspaceNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getDataFlowsByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator<DataFlowResource> { + for await (const page of this.getDataFlowsByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Creates or updates a data flow. + * @param dataFlowName The data flow name. + * @param dataFlow Data flow resource definition. + * @param options The options parameters. + */ + async createOrUpdateDataFlow( + dataFlowName: string, + dataFlow: DataFlowResource, + options?: DataFlowCreateOrUpdateDataFlowOptionalParams + ): Promise<LROPoller<DataFlowCreateOrUpdateDataFlowResponse>> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateDataFlow", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + dataFlowName, + dataFlow, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as DataFlowCreateOrUpdateDataFlowResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdateDataFlowOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdateDataFlowOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a data flow. + * @param dataFlowName The data flow name. + * @param options The options parameters. + */ + async getDataFlow( + dataFlowName: string, + options?: DataFlowGetDataFlowOptionalParams + ): Promise<DataFlowGetDataFlowResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getDataFlow", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + dataFlowName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDataFlowOperationSpec + ); + return result as DataFlowGetDataFlowResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Deletes a data flow. + * @param dataFlowName The data flow name. 
+ * @param options The options parameters. + */ + async deleteDataFlow( + dataFlowName: string, + options?: coreHttp.OperationOptions + ): Promise<LROPoller<coreHttp.RestResponse>> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteDataFlow", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + dataFlowName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deleteDataFlowOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deleteDataFlowOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Renames a data flow. + * @param dataFlowName The data flow name. + * @param request Proposed new name. + * @param options The options parameters. + */ + async renameDataFlow( + dataFlowName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise<LROPoller<coreHttp.RestResponse>> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameDataFlow", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + dataFlowName, + request, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameDataFlowOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameDataFlowOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Lists data flows. + * @param options The options parameters. + */ + private async _getDataFlowsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise<DataFlowGetDataFlowsByWorkspaceResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getDataFlowsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDataFlowsByWorkspaceOperationSpec + ); + return result as DataFlowGetDataFlowsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * GetDataFlowsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetDataFlowsByWorkspace + * method. + * @param options The options parameters. 
+ */ + private async _getDataFlowsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise<DataFlowGetDataFlowsByWorkspaceNextResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getDataFlowsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDataFlowsByWorkspaceNextOperationSpec + ); + return result as DataFlowGetDataFlowsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + private getOperationOptions<TOptions extends coreHttp.OperationOptions>( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const createOrUpdateDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows/{dataFlowName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.DataFlowResource + }, + 201: { + bodyMapper: Mappers.DataFlowResource + }, + 202: { + bodyMapper: Mappers.DataFlowResource + }, + 204: { + bodyMapper: Mappers.DataFlowResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.dataFlow, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.dataFlowName], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + mediaType: "json", + serializer +}; +const getDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows/{dataFlowName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DataFlowResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.dataFlowName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows/{dataFlowName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.dataFlowName], + headerParameters: [Parameters.accept], + serializer +}; +const renameDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows/{dataFlowName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.dataFlowName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getDataFlowsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/dataflows", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DataFlowListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], 
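// Note on the LRO-shaped specs above (createOrUpdate/delete/rename): they enumerate
// 200/201/202/204 because the service may complete synchronously or start a
// long-running operation, and LROPoller re-issues the spec until a terminal state.
// Caller-side sketch (an assumption, not part of this file: it presumes the generated
// ArtifactsClient exposes this group as `client.dataFlow`, and pollUntilDone comes from
// the @azure/core-lro Poller base class):
//
//   const poller = await client.dataFlow.createOrUpdateDataFlow("myFlow", resource);
//   const flow = await poller.pollUntilDone();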
+ urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getDataFlowsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DataFlowListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts new file mode 100644 index 000000000000..42c6b2360da6 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/dataFlowDebugSession.ts @@ -0,0 +1,443 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + DataFlowDebugSessionInfo, + CreateDataFlowDebugSessionRequest, + DataFlowDebugSessionCreateDataFlowDebugSessionResponse, + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse, + DataFlowDebugPackage, + DataFlowDebugSessionAddDataFlowResponse, + DeleteDataFlowDebugSessionRequest, + DataFlowDebugCommandRequest, + DataFlowDebugSessionExecuteCommandResponse, + DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a DataFlowDebugSession. + */ +export class DataFlowDebugSession { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the DataFlowDebugSession class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Queries all active data flow debug sessions. + * @param options The options parameters. + */ + public listQueryDataFlowDebugSessionsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator<DataFlowDebugSessionInfo> { + const iter = this.queryDataFlowDebugSessionsByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.queryDataFlowDebugSessionsByWorkspacePagingPage(options); + } + }; + } + + private async *queryDataFlowDebugSessionsByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator<DataFlowDebugSessionInfo[]> { + let result = await this._queryDataFlowDebugSessionsByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._queryDataFlowDebugSessionsByWorkspaceNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *queryDataFlowDebugSessionsByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator<DataFlowDebugSessionInfo> { + for await (const page of this.queryDataFlowDebugSessionsByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Creates a data flow debug session. + * @param request Data flow debug session definition. + * @param options The options parameters. 
+ */ + async createDataFlowDebugSession( + request: CreateDataFlowDebugSessionRequest, + options?: coreHttp.OperationOptions + ): Promise<LROPoller<DataFlowDebugSessionCreateDataFlowDebugSessionResponse>> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createDataFlowDebugSession", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + request, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as DataFlowDebugSessionCreateDataFlowDebugSessionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createDataFlowDebugSessionOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createDataFlowDebugSessionOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Queries all active data flow debug sessions. + * @param options The options parameters. + */ + private async _queryDataFlowDebugSessionsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise<DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryDataFlowDebugSessionsByWorkspaceOperationSpec + ); + return result as DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Adds a data flow to the debug session. + * @param request Data flow debug session definition with debug content. + * @param options The options parameters. + */ + async addDataFlow( + request: DataFlowDebugPackage, + options?: coreHttp.OperationOptions + ): Promise<DataFlowDebugSessionAddDataFlowResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-addDataFlow", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + request, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + addDataFlowOperationSpec + ); + return result as DataFlowDebugSessionAddDataFlowResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Deletes a data flow debug session. + * @param request Data flow debug session definition for deletion. + * @param options The options parameters. 
+ */ + async deleteDataFlowDebugSession( + request: DeleteDataFlowDebugSessionRequest, + options?: coreHttp.OperationOptions + ): Promise<coreHttp.RestResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteDataFlowDebugSession", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + request, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + deleteDataFlowDebugSessionOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Executes a data flow debug command. + * @param request Data flow debug command definition. + * @param options The options parameters. + */ + async executeCommand( + request: DataFlowDebugCommandRequest, + options?: coreHttp.OperationOptions + ): Promise<LROPoller<DataFlowDebugSessionExecuteCommandResponse>> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-executeCommand", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + request, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as DataFlowDebugSessionExecuteCommandResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + executeCommandOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: executeCommandOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * QueryDataFlowDebugSessionsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the + * QueryDataFlowDebugSessionsByWorkspace method. + * @param options The options parameters. 
+ */ + private async _queryDataFlowDebugSessionsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_queryDataFlowDebugSessionsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryDataFlowDebugSessionsByWorkspaceNextOperationSpec + ); + return result as DataFlowDebugSessionQueryDataFlowDebugSessionsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const createDataFlowDebugSessionOperationSpec: coreHttp.OperationSpec = { + path: "/createDataFlowDebugSession", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.CreateDataFlowDebugSessionResponse + }, + 201: { + bodyMapper: Mappers.CreateDataFlowDebugSessionResponse + }, + 202: { + bodyMapper: Mappers.CreateDataFlowDebugSessionResponse + }, + 204: { + bodyMapper: Mappers.CreateDataFlowDebugSessionResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request1, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const queryDataFlowDebugSessionsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/queryDataFlowDebugSessions", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.QueryDataFlowDebugSessionsResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const addDataFlowOperationSpec: coreHttp.OperationSpec = { + path: "/addDataFlowToDebugSession", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.AddDataFlowToDebugSessionResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request2, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const deleteDataFlowDebugSessionOperationSpec: coreHttp.OperationSpec = { + path: "/deleteDataFlowDebugSession", + httpMethod: "POST", + responses: { + 200: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request3, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const executeCommandOperationSpec: coreHttp.OperationSpec = { + path: "/executeDataFlowDebugCommand", + httpMethod: "POST", + responses: { + 200: { + 
bodyMapper: Mappers.DataFlowDebugCommandResponse
+    },
+    201: {
+      bodyMapper: Mappers.DataFlowDebugCommandResponse
+    },
+    202: {
+      bodyMapper: Mappers.DataFlowDebugCommandResponse
+    },
+    204: {
+      bodyMapper: Mappers.DataFlowDebugCommandResponse
+    },
+    default: {
+      bodyMapper: Mappers.CloudError
+    }
+  },
+  requestBody: Parameters.request4,
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint],
+  headerParameters: [Parameters.accept, Parameters.contentType],
+  mediaType: "json",
+  serializer
+};
+const queryDataFlowDebugSessionsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = {
+  path: "{nextLink}",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.QueryDataFlowDebugSessionsResponse
+    },
+    default: {
+      bodyMapper: Mappers.CloudError
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint, Parameters.nextLink],
+  headerParameters: [Parameters.accept],
+  serializer
+};
diff --git a/sdk/synapse/synapse-artifacts/src/operations/dataset.ts b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts
new file mode 100644
index 000000000000..a250571d1e19
--- /dev/null
+++ b/sdk/synapse/synapse-artifacts/src/operations/dataset.ts
@@ -0,0 +1,451 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import { CanonicalCode } from "@opentelemetry/api";
+import { createSpan } from "../tracing";
+import { PagedAsyncIterableIterator } from "@azure/core-paging";
+import * as coreHttp from "@azure/core-http";
+import * as Mappers from "../models/mappers";
+import * as Parameters from "../models/parameters";
+import { ArtifactsClient } from "../artifactsClient";
+import { LROPoller, shouldDeserializeLRO } from "../lro";
+import {
+  DatasetResource,
+  DatasetGetDatasetsByWorkspaceResponse,
+  DatasetCreateOrUpdateDatasetOptionalParams,
+  DatasetCreateOrUpdateDatasetResponse,
+  DatasetGetDatasetOptionalParams,
+  DatasetGetDatasetResponse,
+  ArtifactRenameRequest,
+  DatasetGetDatasetsByWorkspaceNextResponse
+} from "../models";
+
+/**
+ * Class representing a Dataset.
+ */
+export class Dataset {
+  private readonly client: ArtifactsClient;
+
+  /**
+   * Initializes a new instance of the Dataset class.
+   * @param client Reference to the service client.
+   */
+  constructor(client: ArtifactsClient) {
+    this.client = client;
+  }
+
+  /**
+   * Lists datasets.
+   * @param options The options parameters.
+   */
+  public listDatasetsByWorkspace(
+    options?: coreHttp.OperationOptions
+  ): PagedAsyncIterableIterator<DatasetResource> {
+    const iter = this.getDatasetsByWorkspacePagingAll(options);
+    return {
+      next() {
+        return iter.next();
+      },
+      [Symbol.asyncIterator]() {
+        return this;
+      },
+      byPage: () => {
+        return this.getDatasetsByWorkspacePagingPage(options);
+      }
+    };
+  }
+
+  private async *getDatasetsByWorkspacePagingPage(
+    options?: coreHttp.OperationOptions
+  ): AsyncIterableIterator<DatasetResource[]> {
+    let result = await this._getDatasetsByWorkspace(options);
+    yield result.value || [];
+    let continuationToken = result.nextLink;
+    while (continuationToken) {
+      result = await this._getDatasetsByWorkspaceNext(continuationToken, options);
+      continuationToken = result.nextLink;
+      yield result.value || [];
+    }
+  }
+
+  private async *getDatasetsByWorkspacePagingAll(
+    options?: coreHttp.OperationOptions
+  ): AsyncIterableIterator<DatasetResource> {
+    for await (const page of this.getDatasetsByWorkspacePagingPage(options)) {
+      yield* page;
+    }
+  }
+
+  /**
+   * Lists datasets.
+   * @param options The options parameters.
+ */ + private async _getDatasetsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getDatasetsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDatasetsByWorkspaceOperationSpec + ); + return result as DatasetGetDatasetsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Creates or updates a dataset. + * @param datasetName The dataset name. + * @param dataset Dataset resource definition. + * @param options The options parameters. + */ + async createOrUpdateDataset( + datasetName: string, + dataset: DatasetResource, + options?: DatasetCreateOrUpdateDatasetOptionalParams + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateDataset", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + datasetName, + dataset, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as DatasetCreateOrUpdateDatasetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdateDatasetOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdateDatasetOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a dataset. + * @param datasetName The dataset name. + * @param options The options parameters. + */ + async getDataset( + datasetName: string, + options?: DatasetGetDatasetOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getDataset", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + datasetName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDatasetOperationSpec + ); + return result as DatasetGetDatasetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Deletes a dataset. + * @param datasetName The dataset name. + * @param options The options parameters. 
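+   *
+   * A minimal sketch (assumes `client` is an authenticated ArtifactsClient; the dataset name
+   * is illustrative). The returned LROPoller can be polled to completion:
+   *
+   *   const poller = await client.dataset.deleteDataset("MyDataset");
+   *   await poller.pollUntilDone();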
+ */ + async deleteDataset( + datasetName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteDataset", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + datasetName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deleteDatasetOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deleteDatasetOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Renames a dataset. + * @param datasetName The dataset name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renameDataset( + datasetName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameDataset", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + datasetName, + request, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameDatasetOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameDatasetOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetDatasetsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetDatasetsByWorkspace method. + * @param options The options parameters. 
+ */ + private async _getDatasetsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getDatasetsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getDatasetsByWorkspaceNextOperationSpec + ); + return result as DatasetGetDatasetsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getDatasetsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/datasets", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DatasetListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateDatasetOperationSpec: coreHttp.OperationSpec = { + path: "/datasets/{datasetName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.DatasetResource + }, + 201: { + bodyMapper: Mappers.DatasetResource + }, + 202: { + bodyMapper: Mappers.DatasetResource + }, + 204: { + bodyMapper: Mappers.DatasetResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.dataset, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.datasetName], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + mediaType: "json", + serializer +}; +const getDatasetOperationSpec: coreHttp.OperationSpec = { + path: "/datasets/{datasetName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.DatasetResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.datasetName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteDatasetOperationSpec: coreHttp.OperationSpec = { + path: "/datasets/{datasetName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.datasetName], + headerParameters: [Parameters.accept], + serializer +}; +const renameDatasetOperationSpec: coreHttp.OperationSpec = { + path: "/datasets/{datasetName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.datasetName], + headerParameters: 
[Parameters.accept, Parameters.contentType],
+  mediaType: "json",
+  serializer
+};
+const getDatasetsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = {
+  path: "{nextLink}",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.DatasetListResponse
+    },
+    default: {
+      bodyMapper: Mappers.CloudError
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint, Parameters.nextLink],
+  headerParameters: [Parameters.accept],
+  serializer
+};
diff --git a/sdk/synapse/synapse-artifacts/src/operations/index.ts b/sdk/synapse/synapse-artifacts/src/operations/index.ts
new file mode 100644
index 000000000000..680e8760f37f
--- /dev/null
+++ b/sdk/synapse/synapse-artifacts/src/operations/index.ts
@@ -0,0 +1,18 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+export * from "./linkedService";
+export * from "./dataset";
+export * from "./pipeline";
+export * from "./pipelineRun";
+export * from "./trigger";
+export * from "./triggerRun";
+export * from "./dataFlow";
+export * from "./dataFlowDebugSession";
+export * from "./sqlScript";
+export * from "./sparkJobDefinition";
+export * from "./notebook";
+export * from "./workspace";
+export * from "./sqlPools";
+export * from "./bigDataPools";
+export * from "./integrationRuntimes";
+export * from "./workspaceGitRepoManagement";
diff --git a/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts
new file mode 100644
index 000000000000..7c51643af23f
--- /dev/null
+++ b/sdk/synapse/synapse-artifacts/src/operations/integrationRuntimes.ts
@@ -0,0 +1,117 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import { CanonicalCode } from "@opentelemetry/api";
+import { createSpan } from "../tracing";
+import * as coreHttp from "@azure/core-http";
+import * as Mappers from "../models/mappers";
+import * as Parameters from "../models/parameters";
+import { ArtifactsClient } from "../artifactsClient";
+import { IntegrationRuntimesListResponse, IntegrationRuntimesGetResponse } from "../models";
+
+/**
+ * Class representing an IntegrationRuntimes.
+ */
+export class IntegrationRuntimes {
+  private readonly client: ArtifactsClient;
+
+  /**
+   * Initializes a new instance of the IntegrationRuntimes class.
+   * @param client Reference to the service client.
+   */
+  constructor(client: ArtifactsClient) {
+    this.client = client;
+  }
+
+  /**
+   * Lists integration runtimes.
+   * @param options The options parameters.
+   */
+  async list(options?: coreHttp.OperationOptions): Promise<IntegrationRuntimesListResponse> {
+    const { span, updatedOptions } = createSpan(
+      "ArtifactsClient-list",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec);
+      return result as IntegrationRuntimesListResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Gets an integration runtime.
+   * @param integrationRuntimeName The integration runtime name.
+   * @param options The options parameters.
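+   *
+   * A minimal sketch (assumes `client` is an authenticated ArtifactsClient; the runtime name
+   * is illustrative):
+   *
+   *   const runtime = await client.integrationRuntimes.get("AutoResolveIntegrationRuntime");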
+ */ + async get( + integrationRuntimeName: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + integrationRuntimeName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); + return result as IntegrationRuntimesGetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const listOperationSpec: coreHttp.OperationSpec = { + path: "/integrationRuntimes", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.IntegrationRuntimeListResponse + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getOperationSpec: coreHttp.OperationSpec = { + path: "/integrationRuntimes/{integrationRuntimeName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.IntegrationRuntimeResource + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.integrationRuntimeName], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts new file mode 100644 index 000000000000..8016b8281fc3 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/linkedService.ts @@ -0,0 +1,452 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + LinkedServiceResource, + LinkedServiceGetLinkedServicesByWorkspaceResponse, + LinkedServiceCreateOrUpdateLinkedServiceOptionalParams, + LinkedServiceCreateOrUpdateLinkedServiceResponse, + LinkedServiceGetLinkedServiceOptionalParams, + LinkedServiceGetLinkedServiceResponse, + ArtifactRenameRequest, + LinkedServiceGetLinkedServicesByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a LinkedService. + */ +export class LinkedService { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class LinkedService class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists linked services. + * @param options The options parameters. 
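+   *
+   * A minimal sketch (assumes `client` is an authenticated ArtifactsClient exposing this group
+   * as `client.linkedService`); pages can also be consumed directly via `.byPage()`:
+   *
+   *   for await (const linkedService of client.linkedService.listLinkedServicesByWorkspace()) {
+   *     console.log(linkedService.name);
+   *   }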
+ */ + public listLinkedServicesByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getLinkedServicesByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getLinkedServicesByWorkspacePagingPage(options); + } + }; + } + + private async *getLinkedServicesByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getLinkedServicesByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getLinkedServicesByWorkspaceNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getLinkedServicesByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getLinkedServicesByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists linked services. + * @param options The options parameters. + */ + private async _getLinkedServicesByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getLinkedServicesByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getLinkedServicesByWorkspaceOperationSpec + ); + return result as LinkedServiceGetLinkedServicesByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Creates or updates a linked service. + * @param linkedServiceName The linked service name. + * @param linkedService Linked service resource definition. + * @param options The options parameters. + */ + async createOrUpdateLinkedService( + linkedServiceName: string, + linkedService: LinkedServiceResource, + options?: LinkedServiceCreateOrUpdateLinkedServiceOptionalParams + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateLinkedService", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + linkedServiceName, + linkedService, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as LinkedServiceCreateOrUpdateLinkedServiceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdateLinkedServiceOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdateLinkedServiceOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a linked service. + * @param linkedServiceName The linked service name. + * @param options The options parameters. 
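+   *
+   * A minimal sketch (assumes `client` is an authenticated ArtifactsClient; the linked service
+   * name is illustrative):
+   *
+   *   const linkedService = await client.linkedService.getLinkedService("MyLinkedService");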
+ */ + async getLinkedService( + linkedServiceName: string, + options?: LinkedServiceGetLinkedServiceOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getLinkedService", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + linkedServiceName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getLinkedServiceOperationSpec + ); + return result as LinkedServiceGetLinkedServiceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Deletes a linked service. + * @param linkedServiceName The linked service name. + * @param options The options parameters. + */ + async deleteLinkedService( + linkedServiceName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteLinkedService", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + linkedServiceName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deleteLinkedServiceOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deleteLinkedServiceOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Renames a linked service. + * @param linkedServiceName The linked service name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renameLinkedService( + linkedServiceName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameLinkedService", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + linkedServiceName, + request, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameLinkedServiceOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameLinkedServiceOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetLinkedServicesByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetLinkedServicesByWorkspace + * method. + * @param options The options parameters. 
+ */ + private async _getLinkedServicesByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getLinkedServicesByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getLinkedServicesByWorkspaceNextOperationSpec + ); + return result as LinkedServiceGetLinkedServicesByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getLinkedServicesByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.LinkedServiceListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateLinkedServiceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices/{linkedServiceName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.LinkedServiceResource + }, + 201: { + bodyMapper: Mappers.LinkedServiceResource + }, + 202: { + bodyMapper: Mappers.LinkedServiceResource + }, + 204: { + bodyMapper: Mappers.LinkedServiceResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.linkedService, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + mediaType: "json", + serializer +}; +const getLinkedServiceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices/{linkedServiceName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.LinkedServiceResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteLinkedServiceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices/{linkedServiceName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], + headerParameters: [Parameters.accept], + serializer +}; +const renameLinkedServiceOperationSpec: coreHttp.OperationSpec = { + path: "/linkedservices/{linkedServiceName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + 
} + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.linkedServiceName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getLinkedServicesByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.LinkedServiceListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/notebook.ts b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts new file mode 100644 index 000000000000..cf4935ef07c2 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/notebook.ts @@ -0,0 +1,594 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + NotebookResource, + NotebookGetNotebooksByWorkspaceResponse, + NotebookGetNotebookSummaryByWorkSpaceResponse, + NotebookCreateOrUpdateNotebookOptionalParams, + NotebookCreateOrUpdateNotebookResponse, + NotebookGetNotebookOptionalParams, + NotebookGetNotebookResponse, + ArtifactRenameRequest, + NotebookGetNotebooksByWorkspaceNextResponse, + NotebookGetNotebookSummaryByWorkSpaceNextResponse +} from "../models"; + +/** + * Class representing a Notebook. + */ +export class Notebook { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class Notebook class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists Notebooks. + * @param options The options parameters. + */ + public listNotebooksByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getNotebooksByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getNotebooksByWorkspacePagingPage(options); + } + }; + } + + private async *getNotebooksByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getNotebooksByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getNotebooksByWorkspaceNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getNotebooksByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getNotebooksByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists a summary of Notebooks. + * @param options The options parameters. 
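+   *
+   * A minimal sketch (assumes `client` is an authenticated ArtifactsClient); each item is a
+   * NotebookResource, iterated across pages transparently:
+   *
+   *   for await (const notebook of client.notebook.listNotebookSummaryByWorkSpace()) {
+   *     console.log(notebook.name);
+   *   }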
+ */ + public listNotebookSummaryByWorkSpace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getNotebookSummaryByWorkSpacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getNotebookSummaryByWorkSpacePagingPage(options); + } + }; + } + + private async *getNotebookSummaryByWorkSpacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getNotebookSummaryByWorkSpace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getNotebookSummaryByWorkSpaceNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getNotebookSummaryByWorkSpacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getNotebookSummaryByWorkSpacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists Notebooks. + * @param options The options parameters. + */ + private async _getNotebooksByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getNotebooksByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getNotebooksByWorkspaceOperationSpec + ); + return result as NotebookGetNotebooksByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Lists a summary of Notebooks. + * @param options The options parameters. + */ + private async _getNotebookSummaryByWorkSpace( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getNotebookSummaryByWorkSpace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getNotebookSummaryByWorkSpaceOperationSpec + ); + return result as NotebookGetNotebookSummaryByWorkSpaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Creates or updates a Note Book. + * @param notebookName The notebook name. + * @param notebook Note book resource definition. + * @param options The options parameters. 
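+   *
+   * A hypothetical sketch (assumes `client` is an authenticated ArtifactsClient and that
+   * `notebookResource` is a previously constructed NotebookResource payload):
+   *
+   *   const poller = await client.notebook.createOrUpdateNotebook("MyNotebook", notebookResource);
+   *   const created = await poller.pollUntilDone();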
+   */
+  async createOrUpdateNotebook(
+    notebookName: string,
+    notebook: NotebookResource,
+    options?: NotebookCreateOrUpdateNotebookOptionalParams
+  ): Promise<LROPoller<NotebookCreateOrUpdateNotebookResponse>> {
+    const { span, updatedOptions } = createSpan(
+      "ArtifactsClient-createOrUpdateNotebook",
+      this.getOperationOptions(options, "undefined")
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      notebookName,
+      notebook,
+      options: updatedOptions
+    };
+    const sendOperation = async (
+      args: coreHttp.OperationArguments,
+      spec: coreHttp.OperationSpec
+    ) => {
+      try {
+        const result = await this.client.sendOperationRequest(args, spec);
+        return result as NotebookCreateOrUpdateNotebookResponse;
+      } catch (error) {
+        span.setStatus({
+          code: CanonicalCode.UNKNOWN,
+          message: error.message
+        });
+        throw error;
+      } finally {
+        span.end();
+      }
+    };
+
+    const initialOperationResult = await sendOperation(
+      operationArguments,
+      createOrUpdateNotebookOperationSpec
+    );
+    return new LROPoller({
+      initialOperationArguments: operationArguments,
+      initialOperationSpec: createOrUpdateNotebookOperationSpec,
+      initialOperationResult,
+      sendOperation
+    });
+  }
+
+  /**
+   * Gets a notebook.
+   * @param notebookName The notebook name.
+   * @param options The options parameters.
+   */
+  async getNotebook(
+    notebookName: string,
+    options?: NotebookGetNotebookOptionalParams
+  ): Promise<NotebookGetNotebookResponse> {
+    const { span, updatedOptions } = createSpan(
+      "ArtifactsClient-getNotebook",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      notebookName,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        getNotebookOperationSpec
+      );
+      return result as NotebookGetNotebookResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Deletes a notebook.
+   * @param notebookName The notebook name.
+   * @param options The options parameters.
+   */
+  async deleteNotebook(
+    notebookName: string,
+    options?: coreHttp.OperationOptions
+  ): Promise<LROPoller<coreHttp.RestResponse>> {
+    const { span, updatedOptions } = createSpan(
+      "ArtifactsClient-deleteNotebook",
+      this.getOperationOptions(options, "undefined")
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      notebookName,
+      options: updatedOptions
+    };
+    const sendOperation = async (
+      args: coreHttp.OperationArguments,
+      spec: coreHttp.OperationSpec
+    ) => {
+      try {
+        const result = await this.client.sendOperationRequest(args, spec);
+        return result as coreHttp.RestResponse;
+      } catch (error) {
+        span.setStatus({
+          code: CanonicalCode.UNKNOWN,
+          message: error.message
+        });
+        throw error;
+      } finally {
+        span.end();
+      }
+    };
+
+    const initialOperationResult = await sendOperation(
+      operationArguments,
+      deleteNotebookOperationSpec
+    );
+    return new LROPoller({
+      initialOperationArguments: operationArguments,
+      initialOperationSpec: deleteNotebookOperationSpec,
+      initialOperationResult,
+      sendOperation
+    });
+  }
+
+  /**
+   * Renames a notebook.
+   * @param notebookName The notebook name.
+   * @param request Proposed new name.
+   * @param options The options parameters.
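+   *
+   * A minimal sketch (assumes `client` is an authenticated ArtifactsClient; names are
+   * illustrative, and the ArtifactRenameRequest is assumed to carry the target name in
+   * `newName`):
+   *
+   *   const poller = await client.notebook.renameNotebook("MyNotebook", { newName: "MyRenamedNotebook" });
+   *   await poller.pollUntilDone();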
+ */ + async renameNotebook( + notebookName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameNotebook", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + notebookName, + request, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameNotebookOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameNotebookOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetNotebooksByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetNotebooksByWorkspace + * method. + * @param options The options parameters. + */ + private async _getNotebooksByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getNotebooksByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getNotebooksByWorkspaceNextOperationSpec + ); + return result as NotebookGetNotebooksByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * GetNotebookSummaryByWorkSpaceNext + * @param nextLink The nextLink from the previous successful call to the GetNotebookSummaryByWorkSpace + * method. + * @param options The options parameters. 
+ */ + private async _getNotebookSummaryByWorkSpaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getNotebookSummaryByWorkSpaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getNotebookSummaryByWorkSpaceNextOperationSpec + ); + return result as NotebookGetNotebookSummaryByWorkSpaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getNotebooksByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const getNotebookSummaryByWorkSpaceOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks/summary", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateNotebookOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks/{notebookName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.NotebookResource + }, + 201: { + bodyMapper: Mappers.NotebookResource + }, + 202: { + bodyMapper: Mappers.NotebookResource + }, + 204: { + bodyMapper: Mappers.NotebookResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.notebook, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.notebookName], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + mediaType: "json", + serializer +}; +const getNotebookOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks/{notebookName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.notebookName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteNotebookOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks/{notebookName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.notebookName], + 
headerParameters: [Parameters.accept], + serializer +}; +const renameNotebookOperationSpec: coreHttp.OperationSpec = { + path: "/notebooks/{notebookName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.notebookName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getNotebooksByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; +const getNotebookSummaryByWorkSpaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.NotebookListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts new file mode 100644 index 000000000000..f58c8c3929e2 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/pipeline.ts @@ -0,0 +1,511 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + PipelineResource, + PipelineGetPipelinesByWorkspaceResponse, + PipelineCreateOrUpdatePipelineOptionalParams, + PipelineCreateOrUpdatePipelineResponse, + PipelineGetPipelineOptionalParams, + PipelineGetPipelineResponse, + ArtifactRenameRequest, + PipelineCreatePipelineRunOptionalParams, + PipelineCreatePipelineRunResponse, + PipelineGetPipelinesByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a Pipeline. + */ +export class Pipeline { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class Pipeline class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists pipelines. + * @param options The options parameters. 
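+   *
+   * A minimal sketch (assumes `client` is an authenticated ArtifactsClient exposing this group
+   * as `client.pipeline`):
+   *
+   *   for await (const pipeline of client.pipeline.listPipelinesByWorkspace()) {
+   *     console.log(pipeline.name);
+   *   }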
+ */ + public listPipelinesByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getPipelinesByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getPipelinesByWorkspacePagingPage(options); + } + }; + } + + private async *getPipelinesByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getPipelinesByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getPipelinesByWorkspaceNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getPipelinesByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getPipelinesByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists pipelines. + * @param options The options parameters. + */ + private async _getPipelinesByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getPipelinesByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getPipelinesByWorkspaceOperationSpec + ); + return result as PipelineGetPipelinesByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Creates or updates a pipeline. + * @param pipelineName The pipeline name. + * @param pipeline Pipeline resource definition. + * @param options The options parameters. + */ + async createOrUpdatePipeline( + pipelineName: string, + pipeline: PipelineResource, + options?: PipelineCreateOrUpdatePipelineOptionalParams + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdatePipeline", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + pipeline, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as PipelineCreateOrUpdatePipelineResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdatePipelineOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdatePipelineOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a pipeline. + * @param pipelineName The pipeline name. + * @param options The options parameters. 
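+   *
+   * A minimal sketch (assumes `client` is an authenticated ArtifactsClient; the pipeline name
+   * is illustrative):
+   *
+   *   const pipeline = await client.pipeline.getPipeline("MyPipeline");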
+ */ + async getPipeline( + pipelineName: string, + options?: PipelineGetPipelineOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getPipeline", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getPipelineOperationSpec + ); + return result as PipelineGetPipelineResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Deletes a pipeline. + * @param pipelineName The pipeline name. + * @param options The options parameters. + */ + async deletePipeline( + pipelineName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deletePipeline", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deletePipelineOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deletePipelineOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Renames a pipeline. + * @param pipelineName The pipeline name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renamePipeline( + pipelineName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renamePipeline", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + request, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renamePipelineOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renamePipelineOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Creates a run of a pipeline. + * @param pipelineName The pipeline name. + * @param options The options parameters. 
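+   *
+   * A hypothetical sketch (assumes `client` is an authenticated ArtifactsClient and that run
+   * parameters are supplied through the optional `parameters` bag, per the operation spec
+   * below):
+   *
+   *   const run = await client.pipeline.createPipelineRun("MyPipeline", {
+   *     parameters: { inputPath: "container/folder" }
+   *   });
+   *   console.log(run.runId);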
+ */ + async createPipelineRun( + pipelineName: string, + options?: PipelineCreatePipelineRunOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createPipelineRun", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createPipelineRunOperationSpec + ); + return result as PipelineCreatePipelineRunResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * GetPipelinesByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetPipelinesByWorkspace + * method. + * @param options The options parameters. + */ + private async _getPipelinesByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getPipelinesByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getPipelinesByWorkspaceNextOperationSpec + ); + return result as PipelineGetPipelinesByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getPipelinesByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PipelineListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdatePipelineOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.PipelineResource + }, + 201: { + bodyMapper: Mappers.PipelineResource + }, + 202: { + bodyMapper: Mappers.PipelineResource + }, + 204: { + bodyMapper: Mappers.PipelineResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.pipeline, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.pipelineName], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + mediaType: "json", + serializer +}; +const getPipelineOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PipelineResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: 
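createPipelineRun maps its optional bag onto the operation spec that follows: `parameters` becomes the POST body, while referencePipelineRunId, isRecovery, and startActivityName become query parameters. A sketch, with the option names assumed from PipelineCreatePipelineRunOptionalParams:

import { ArtifactsClient } from "@azure/synapse-artifacts";

async function startRun(client: ArtifactsClient): Promise<void> {
  const result = await client.pipeline.createPipelineRun("dailyLoad", {
    parameters: { inputPath: "raw/2020-11-30" } // pipeline parameters, if the pipeline declares any
  });
  // CreateRunResponse carries the service-assigned run identifier.
  console.log("started run:", result.runId);
}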
[Parameters.endpoint, Parameters.pipelineName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deletePipelineOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.pipelineName], + headerParameters: [Parameters.accept], + serializer +}; +const renamePipelineOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.pipelineName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const createPipelineRunOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}/createRun", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: Mappers.CreateRunResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.parameters, + queryParameters: [ + Parameters.apiVersion, + Parameters.referencePipelineRunId, + Parameters.isRecovery, + Parameters.startActivityName + ], + urlParameters: [Parameters.endpoint, Parameters.pipelineName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getPipelinesByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PipelineListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts new file mode 100644 index 000000000000..c3246425091e --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/pipelineRun.ts @@ -0,0 +1,242 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { + RunFilterParameters, + PipelineRunQueryPipelineRunsByWorkspaceResponse, + PipelineRunGetPipelineRunResponse, + PipelineRunQueryActivityRunsResponse, + PipelineRunCancelPipelineRunOptionalParams +} from "../models"; + +/** + * Class representing a PipelineRun. + */ +export class PipelineRun { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class PipelineRun class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Query pipeline runs in the workspace based on input filter conditions. + * @param filterParameters Parameters to filter the pipeline run. + * @param options The options parameters. 
+ */ + async queryPipelineRunsByWorkspace( + filterParameters: RunFilterParameters, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-queryPipelineRunsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + filterParameters, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryPipelineRunsByWorkspaceOperationSpec + ); + return result as PipelineRunQueryPipelineRunsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Get a pipeline run by its run ID. + * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + async getPipelineRun( + runId: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getPipelineRun", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + runId, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getPipelineRunOperationSpec + ); + return result as PipelineRunGetPipelineRunResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Query activity runs based on input filter conditions. + * @param pipelineName The pipeline name. + * @param runId The pipeline run identifier. + * @param filterParameters Parameters to filter the activity runs. + * @param options The options parameters. + */ + async queryActivityRuns( + pipelineName: string, + runId: string, + filterParameters: RunFilterParameters, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-queryActivityRuns", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + pipelineName, + runId, + filterParameters, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryActivityRunsOperationSpec + ); + return result as PipelineRunQueryActivityRunsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Cancel a pipeline run by its run ID. + * @param runId The pipeline run identifier. + * @param options The options parameters. 
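queryPipelineRunsByWorkspace and queryActivityRuns both take RunFilterParameters. A sketch querying the last 24 hours of runs, assuming the usual lastUpdatedAfter/lastUpdatedBefore window this API family requires:

import { ArtifactsClient } from "@azure/synapse-artifacts";

async function runsFromLastDay(client: ArtifactsClient): Promise<void> {
  const now = new Date();
  const dayAgo = new Date(now.getTime() - 24 * 60 * 60 * 1000);
  const result = await client.pipelineRun.queryPipelineRunsByWorkspace({
    lastUpdatedAfter: dayAgo,
    lastUpdatedBefore: now
  });
  for (const run of result.value || []) {
    console.log(run.runId, run.pipelineName, run.status);
  }
}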
+ */ + async cancelPipelineRun( + runId: string, + options?: PipelineRunCancelPipelineRunOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-cancelPipelineRun", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + runId, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + cancelPipelineRunOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const queryPipelineRunsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/queryPipelineRuns", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.PipelineRunsQueryResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.filterParameters, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getPipelineRunOperationSpec: coreHttp.OperationSpec = { + path: "/pipelineruns/{runId}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PipelineRun + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.runId], + headerParameters: [Parameters.accept], + serializer +}; +const queryActivityRunsOperationSpec: coreHttp.OperationSpec = { + path: "/pipelines/{pipelineName}/pipelineruns/{runId}/queryActivityruns", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.ActivityRunsQueryResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.filterParameters, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.pipelineName, Parameters.runId], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const cancelPipelineRunOperationSpec: coreHttp.OperationSpec = { + path: "/pipelineruns/{runId}/cancel", + httpMethod: "POST", + responses: { + 200: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion, Parameters.isRecursive], + urlParameters: [Parameters.endpoint, Parameters.runId], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts new file mode 100644 index 000000000000..c7e9f1cf8def --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/sparkJobDefinition.ts @@ -0,0 +1,565 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
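cancelPipelineRun surfaces the isRecursive query parameter from cancelPipelineRunOperationSpec above, which is understood to also cancel child runs started by ExecutePipeline activities. Sketch:

import { ArtifactsClient } from "@azure/synapse-artifacts";

async function cancelRunTree(client: ArtifactsClient, runId: string): Promise<void> {
  // isRecursive is assumed on PipelineRunCancelPipelineRunOptionalParams,
  // mirroring the query parameter in the operation spec.
  await client.pipelineRun.cancelPipelineRun(runId, { isRecursive: true });
}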
+import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + SparkJobDefinitionResource, + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams, + SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionOptionalParams, + SparkJobDefinitionGetSparkJobDefinitionResponse, + SparkJobDefinitionExecuteSparkJobDefinitionResponse, + ArtifactRenameRequest, + SparkJobDefinitionDebugSparkJobDefinitionResponse, + SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a SparkJobDefinition. + */ +export class SparkJobDefinition { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class SparkJobDefinition class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists spark job definitions. + * @param options The options parameters. + */ + public listSparkJobDefinitionsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getSparkJobDefinitionsByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getSparkJobDefinitionsByWorkspacePagingPage(options); + } + }; + } + + private async *getSparkJobDefinitionsByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getSparkJobDefinitionsByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getSparkJobDefinitionsByWorkspaceNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getSparkJobDefinitionsByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getSparkJobDefinitionsByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists spark job definitions. + * @param options The options parameters. + */ + private async _getSparkJobDefinitionsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getSparkJobDefinitionsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkJobDefinitionsByWorkspaceOperationSpec + ); + return result as SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Creates or updates a Spark Job Definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param sparkJobDefinition Spark Job Definition resource definition. 
+ * @param options The options parameters. + */ + async createOrUpdateSparkJobDefinition( + sparkJobDefinitionName: string, + sparkJobDefinition: SparkJobDefinitionResource, + options?: SparkJobDefinitionCreateOrUpdateSparkJobDefinitionOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateSparkJobDefinition", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + sparkJobDefinition, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createOrUpdateSparkJobDefinitionOperationSpec + ); + return result as SparkJobDefinitionCreateOrUpdateSparkJobDefinitionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Gets a Spark Job Definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param options The options parameters. + */ + async getSparkJobDefinition( + sparkJobDefinitionName: string, + options?: SparkJobDefinitionGetSparkJobDefinitionOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getSparkJobDefinition", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkJobDefinitionOperationSpec + ); + return result as SparkJobDefinitionGetSparkJobDefinitionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Deletes a Spark Job Definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param options The options parameters. + */ + async deleteSparkJobDefinition( + sparkJobDefinitionName: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteSparkJobDefinition", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + deleteSparkJobDefinitionOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Executes the spark job definition. + * @param sparkJobDefinitionName The spark job definition name. + * @param options The options parameters. 
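getSparkJobDefinition supports conditional reads: its operation spec maps 304 to an empty body and sends an If-None-Match header. A sketch, assuming an ifNoneMatch option on SparkJobDefinitionGetSparkJobDefinitionOptionalParams and an etag on the returned resource envelope:

import { ArtifactsClient } from "@azure/synapse-artifacts";

async function fetchIfChanged(client: ArtifactsClient, knownEtag: string): Promise<void> {
  const result = await client.sparkJobDefinition.getSparkJobDefinition("wordCountJob", {
    ifNoneMatch: knownEtag // server replies 304 (empty body) while the ETag still matches
  });
  if (result.etag && result.etag !== knownEtag) {
    console.log("definition changed, new etag:", result.etag);
  }
}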
+ */ + async executeSparkJobDefinition( + sparkJobDefinitionName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-executeSparkJobDefinition", + this.getOperationOptions(options, "location") + ); + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as SparkJobDefinitionExecuteSparkJobDefinitionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + executeSparkJobDefinitionOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: executeSparkJobDefinitionOperationSpec, + initialOperationResult, + sendOperation, + finalStateVia: "location" + }); + } + + /** + * Renames a sparkJobDefinition. + * @param sparkJobDefinitionName The spark job definition name. + * @param request proposed new name. + * @param options The options parameters. + */ + async renameSparkJobDefinition( + sparkJobDefinitionName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameSparkJobDefinition", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionName, + request, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameSparkJobDefinitionOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameSparkJobDefinitionOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Debug the spark job definition. + * @param sparkJobDefinitionAzureResource Spark Job Definition resource definition. + * @param options The options parameters. 
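executeSparkJobDefinition is an LRO finalized via the Location header (finalStateVia: "location"), and its terminal payload deserializes as SparkBatchJob. Sketch, again assuming pollUntilDone() from @azure/core-lro:

import { ArtifactsClient } from "@azure/synapse-artifacts";

async function runSparkJob(client: ArtifactsClient): Promise<void> {
  const poller = await client.sparkJobDefinition.executeSparkJobDefinition("wordCountJob");
  // The terminal value is fetched from the Location header URL.
  const batchJob = await poller.pollUntilDone();
  console.log("Spark batch job id:", batchJob.id);
}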
+ */ + async debugSparkJobDefinition( + sparkJobDefinitionAzureResource: SparkJobDefinitionResource, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-debugSparkJobDefinition", + this.getOperationOptions(options, "location") + ); + const operationArguments: coreHttp.OperationArguments = { + sparkJobDefinitionAzureResource, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as SparkJobDefinitionDebugSparkJobDefinitionResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + debugSparkJobDefinitionOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: debugSparkJobDefinitionOperationSpec, + initialOperationResult, + sendOperation, + finalStateVia: "location" + }); + } + + /** + * GetSparkJobDefinitionsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the + * GetSparkJobDefinitionsByWorkspace method. + * @param options The options parameters. + */ + private async _getSparkJobDefinitionsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getSparkJobDefinitionsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkJobDefinitionsByWorkspaceNextOperationSpec + ); + return result as SparkJobDefinitionGetSparkJobDefinitionsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getSparkJobDefinitionsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkJobDefinitionsListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.SparkJobDefinitionResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.sparkJobDefinition, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, 
Parameters.sparkJobDefinitionName], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + mediaType: "json", + serializer +}; +const getSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkJobDefinitionResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], + headerParameters: [Parameters.accept], + serializer +}; +const executeSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}/execute", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.SparkBatchJob + }, + 201: { + bodyMapper: Mappers.SparkBatchJob + }, + 202: { + bodyMapper: Mappers.SparkBatchJob + }, + 204: { + bodyMapper: Mappers.SparkBatchJob + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], + headerParameters: [Parameters.accept], + serializer +}; +const renameSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/sparkJobDefinitions/{sparkJobDefinitionName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sparkJobDefinitionName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const debugSparkJobDefinitionOperationSpec: coreHttp.OperationSpec = { + path: "/debugSparkJobDefinition", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.SparkBatchJob + }, + 201: { + bodyMapper: Mappers.SparkBatchJob + }, + 202: { + bodyMapper: Mappers.SparkBatchJob + }, + 204: { + bodyMapper: Mappers.SparkBatchJob + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.sparkJobDefinitionAzureResource, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getSparkJobDefinitionsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkJobDefinitionsListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts new file mode 100644 index 000000000000..38a4de821c44 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/sqlPools.ts @@ 
-0,0 +1,117 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import { CanonicalCode } from "@opentelemetry/api";
+import { createSpan } from "../tracing";
+import * as coreHttp from "@azure/core-http";
+import * as Mappers from "../models/mappers";
+import * as Parameters from "../models/parameters";
+import { ArtifactsClient } from "../artifactsClient";
+import { SqlPoolsListResponse, SqlPoolsGetResponse } from "../models";
+
+/**
+ * Class representing a SqlPools.
+ */
+export class SqlPools {
+  private readonly client: ArtifactsClient;
+
+  /**
+   * Initialize a new instance of the SqlPools class.
+   * @param client Reference to the service client
+   */
+  constructor(client: ArtifactsClient) {
+    this.client = client;
+  }
+
+  /**
+   * List Sql Pools.
+   * @param options The options parameters.
+   */
+  async list(options?: coreHttp.OperationOptions): Promise<SqlPoolsListResponse> {
+    const { span, updatedOptions } = createSpan(
+      "ArtifactsClient-list",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec);
+      return result as SqlPoolsListResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Get Sql Pool.
+   * @param sqlPoolName The Sql Pool name
+   * @param options The options parameters.
+   */
+  async get(
+    sqlPoolName: string,
+    options?: coreHttp.OperationOptions
+  ): Promise<SqlPoolsGetResponse> {
+    const { span, updatedOptions } = createSpan(
+      "ArtifactsClient-get",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sqlPoolName,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec);
+      return result as SqlPoolsGetResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+}
+// Operation Specifications
+
+const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
+
+const listOperationSpec: coreHttp.OperationSpec = {
+  path: "/sqlPools",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SqlPoolInfoListResult
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const getOperationSpec: coreHttp.OperationSpec = {
+  path: "/sqlPools/{sqlPoolName}",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SqlPool
+    },
+    default: {
+      bodyMapper: Mappers.ErrorContract
+    }
+  },
+  queryParameters: [Parameters.apiVersion],
+  urlParameters: [Parameters.endpoint, Parameters.sqlPoolName],
+  headerParameters: [Parameters.accept],
+  serializer
+};
diff --git a/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts
new file mode 100644
index 000000000000..af9671d7357e
--- /dev/null
+++ b/sdk/synapse/synapse-artifacts/src/operations/sqlScript.ts
@@ -0,0 +1,415 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
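Stepping back to the SqlPools group just added: it is the simplest in this diff, two plain GETs with no paging or polling. Sketch, with property names assumed from the SqlPoolInfoListResult and SqlPool mappers:

import { ArtifactsClient } from "@azure/synapse-artifacts";

async function showSqlPools(client: ArtifactsClient): Promise<void> {
  const pools = await client.sqlPools.list();
  for (const pool of pools.value || []) {
    console.log(pool.name);
  }
  const pool = await client.sqlPools.get("myDedicatedPool");
  console.log("status:", pool.status);
}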
+import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + SqlScriptResource, + SqlScriptGetSqlScriptsByWorkspaceResponse, + SqlScriptCreateOrUpdateSqlScriptOptionalParams, + SqlScriptCreateOrUpdateSqlScriptResponse, + SqlScriptGetSqlScriptOptionalParams, + SqlScriptGetSqlScriptResponse, + ArtifactRenameRequest, + SqlScriptGetSqlScriptsByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a SqlScript. + */ +export class SqlScript { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class SqlScript class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists sql scripts. + * @param options The options parameters. + */ + public listSqlScriptsByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getSqlScriptsByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getSqlScriptsByWorkspacePagingPage(options); + } + }; + } + + private async *getSqlScriptsByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getSqlScriptsByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getSqlScriptsByWorkspaceNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getSqlScriptsByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getSqlScriptsByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists sql scripts. + * @param options The options parameters. + */ + private async _getSqlScriptsByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getSqlScriptsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSqlScriptsByWorkspaceOperationSpec + ); + return result as SqlScriptGetSqlScriptsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Creates or updates a Sql Script. + * @param sqlScriptName The sql script name. + * @param sqlScript Sql Script resource definition. + * @param options The options parameters. 
+ */ + async createOrUpdateSqlScript( + sqlScriptName: string, + sqlScript: SqlScriptResource, + options?: SqlScriptCreateOrUpdateSqlScriptOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateSqlScript", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + sqlScriptName, + sqlScript, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createOrUpdateSqlScriptOperationSpec + ); + return result as SqlScriptCreateOrUpdateSqlScriptResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Gets a sql script. + * @param sqlScriptName The sql script name. + * @param options The options parameters. + */ + async getSqlScript( + sqlScriptName: string, + options?: SqlScriptGetSqlScriptOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getSqlScript", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + sqlScriptName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSqlScriptOperationSpec + ); + return result as SqlScriptGetSqlScriptResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Deletes a Sql Script. + * @param sqlScriptName The sql script name. + * @param options The options parameters. + */ + async deleteSqlScript( + sqlScriptName: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteSqlScript", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + sqlScriptName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + deleteSqlScriptOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Renames a sqlScript. + * @param sqlScriptName The sql script name. + * @param request proposed new name. + * @param options The options parameters. 
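createOrUpdateSqlScript sends an If-Match header when ifMatch is set (see createOrUpdateSqlScriptOperationSpec below), enabling optimistic concurrency on upserts. A sketch with an illustrative SqlScriptResource literal; the exact required fields live in the mappers:

import { ArtifactsClient } from "@azure/synapse-artifacts";

async function upsertScript(client: ArtifactsClient, knownEtag: string): Promise<void> {
  await client.sqlScript.createOrUpdateSqlScript(
    "dailyReport",
    {
      name: "dailyReport",
      properties: {
        type: "SqlQuery",
        content: {
          query: "SELECT COUNT(*) FROM sales",
          currentConnection: { name: "reporting", type: "SqlPool" }
        }
      }
    },
    { ifMatch: knownEtag } // fail rather than silently overwrite a concurrent edit
  );
}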
+ */ + async renameSqlScript( + sqlScriptName: string, + request: ArtifactRenameRequest, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-renameSqlScript", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + sqlScriptName, + request, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + renameSqlScriptOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: renameSqlScriptOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetSqlScriptsByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetSqlScriptsByWorkspace + * method. + * @param options The options parameters. + */ + private async _getSqlScriptsByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getSqlScriptsByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSqlScriptsByWorkspaceNextOperationSpec + ); + return result as SqlScriptGetSqlScriptsByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getSqlScriptsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlScriptsListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateSqlScriptOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts/{sqlScriptName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.SqlScriptResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.sqlScript, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + mediaType: "json", + serializer +}; +const getSqlScriptOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts/{sqlScriptName}", + 
httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlScriptResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteSqlScriptOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts/{sqlScriptName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], + headerParameters: [Parameters.accept], + serializer +}; +const renameSqlScriptOperationSpec: coreHttp.OperationSpec = { + path: "/sqlScripts/{sqlScriptName}/rename", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.request, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.sqlScriptName], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getSqlScriptsByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlScriptsListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/trigger.ts b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts new file mode 100644 index 000000000000..e226d1d9d8b7 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/trigger.ts @@ -0,0 +1,706 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { LROPoller, shouldDeserializeLRO } from "../lro"; +import { + TriggerResource, + TriggerGetTriggersByWorkspaceResponse, + TriggerCreateOrUpdateTriggerOptionalParams, + TriggerCreateOrUpdateTriggerResponse, + TriggerGetTriggerOptionalParams, + TriggerGetTriggerResponse, + TriggerSubscribeTriggerToEventsResponse, + TriggerGetEventSubscriptionStatusResponse, + TriggerUnsubscribeTriggerFromEventsResponse, + TriggerGetTriggersByWorkspaceNextResponse +} from "../models"; + +/** + * Class representing a Trigger. + */ +export class Trigger { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the class Trigger class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Lists triggers. + * @param options The options parameters. 
+ */ + public listTriggersByWorkspace( + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator { + const iter = this.getTriggersByWorkspacePagingAll(options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.getTriggersByWorkspacePagingPage(options); + } + }; + } + + private async *getTriggersByWorkspacePagingPage( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + let result = await this._getTriggersByWorkspace(options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._getTriggersByWorkspaceNext(continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *getTriggersByWorkspacePagingAll( + options?: coreHttp.OperationOptions + ): AsyncIterableIterator { + for await (const page of this.getTriggersByWorkspacePagingPage(options)) { + yield* page; + } + } + + /** + * Lists triggers. + * @param options The options parameters. + */ + private async _getTriggersByWorkspace( + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getTriggersByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getTriggersByWorkspaceOperationSpec + ); + return result as TriggerGetTriggersByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Creates or updates a trigger. + * @param triggerName The trigger name. + * @param trigger Trigger resource definition. + * @param options The options parameters. + */ + async createOrUpdateTrigger( + triggerName: string, + trigger: TriggerResource, + options?: TriggerCreateOrUpdateTriggerOptionalParams + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-createOrUpdateTrigger", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + trigger, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as TriggerCreateOrUpdateTriggerResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + createOrUpdateTriggerOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: createOrUpdateTriggerOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Gets a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. 
+ */ + async getTrigger( + triggerName: string, + options?: TriggerGetTriggerOptionalParams + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getTrigger", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getTriggerOperationSpec + ); + return result as TriggerGetTriggerResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Deletes a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + async deleteTrigger( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-deleteTrigger", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + deleteTriggerOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: deleteTriggerOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Subscribe event trigger to events. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + async subscribeTriggerToEvents( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-subscribeTriggerToEvents", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as TriggerSubscribeTriggerToEventsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + subscribeTriggerToEventsOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: subscribeTriggerToEventsOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Get a trigger's event subscription status. + * @param triggerName The trigger name. + * @param options The options parameters. 
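subscribeTriggerToEvents is an LRO whose terminal payload is a TriggerSubscriptionOperationStatus, and getEventSubscriptionStatus reads the same status on demand. Sketch:

import { ArtifactsClient } from "@azure/synapse-artifacts";

async function wireUpEventTrigger(client: ArtifactsClient): Promise<void> {
  const poller = await client.trigger.subscribeTriggerToEvents("blobEventsTrigger");
  await poller.pollUntilDone();

  const status = await client.trigger.getEventSubscriptionStatus("blobEventsTrigger");
  console.log("subscription status:", status.status); // e.g. "Enabled", per the assumed enum
}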
+ */ + async getEventSubscriptionStatus( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getEventSubscriptionStatus", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getEventSubscriptionStatusOperationSpec + ); + return result as TriggerGetEventSubscriptionStatusResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Unsubscribe event trigger from events. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + async unsubscribeTriggerFromEvents( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-unsubscribeTriggerFromEvents", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as TriggerUnsubscribeTriggerFromEventsResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + unsubscribeTriggerFromEventsOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: unsubscribeTriggerFromEventsOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Starts a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. + */ + async startTrigger( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-startTrigger", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + startTriggerOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: startTriggerOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * Stops a trigger. + * @param triggerName The trigger name. + * @param options The options parameters. 
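startTrigger and stopTrigger are LROs that resolve to a bare RestResponse; none of the 2xx responses in their operation specs below declare a body mapper. Sketch:

import { ArtifactsClient } from "@azure/synapse-artifacts";

async function bounceTrigger(client: ArtifactsClient, triggerName: string): Promise<void> {
  const startPoller = await client.trigger.startTrigger(triggerName);
  await startPoller.pollUntilDone();

  const stopPoller = await client.trigger.stopTrigger(triggerName);
  await stopPoller.pollUntilDone();
}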
+ */ + async stopTrigger( + triggerName: string, + options?: coreHttp.OperationOptions + ): Promise> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-stopTrigger", + this.getOperationOptions(options, "undefined") + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + options: updatedOptions + }; + const sendOperation = async ( + args: coreHttp.OperationArguments, + spec: coreHttp.OperationSpec + ) => { + try { + const result = await this.client.sendOperationRequest(args, spec); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }; + + const initialOperationResult = await sendOperation( + operationArguments, + stopTriggerOperationSpec + ); + return new LROPoller({ + initialOperationArguments: operationArguments, + initialOperationSpec: stopTriggerOperationSpec, + initialOperationResult, + sendOperation + }); + } + + /** + * GetTriggersByWorkspaceNext + * @param nextLink The nextLink from the previous successful call to the GetTriggersByWorkspace method. + * @param options The options parameters. + */ + private async _getTriggersByWorkspaceNext( + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-_getTriggersByWorkspaceNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getTriggersByWorkspaceNextOperationSpec + ); + return result as TriggerGetTriggersByWorkspaceNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + private getOperationOptions( + options: TOptions | undefined, + finalStateVia?: string + ): coreHttp.RequestOptionsBase { + const operationOptions: coreHttp.OperationOptions = options || {}; + operationOptions.requestOptions = { + ...operationOptions.requestOptions, + shouldDeserialize: shouldDeserializeLRO(finalStateVia) + }; + return coreHttp.operationOptionsToRequestOptionsBase(operationOptions); + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getTriggersByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/triggers", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.TriggerListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; +const createOrUpdateTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.TriggerResource + }, + 201: { + bodyMapper: Mappers.TriggerResource + }, + 202: { + bodyMapper: Mappers.TriggerResource + }, + 204: { + bodyMapper: Mappers.TriggerResource + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.trigger, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.ifMatch], + mediaType: "json", + serializer +}; +const getTriggerOperationSpec: 
+const getTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.TriggerResource + }, + 304: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept, Parameters.ifNoneMatch], + serializer +}; +const deleteTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}", + httpMethod: "DELETE", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const subscribeTriggerToEventsOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/subscribeToEvents", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 201: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 202: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 204: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const getEventSubscriptionStatusOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/getEventSubscriptionStatus", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const unsubscribeTriggerFromEventsOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/unsubscribeFromEvents", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 201: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 202: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + 204: { + bodyMapper: Mappers.TriggerSubscriptionOperationStatus + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const startTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/start", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const stopTriggerOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/stop", + httpMethod: "POST", + responses: { + 200: {}, + 201: {}, + 202: {}, + 204: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const getTriggersByWorkspaceNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses:
{ + 200: { + bodyMapper: Mappers.TriggerListResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts new file mode 100644 index 000000000000..6141640254e7 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/triggerRun.ts @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { RunFilterParameters, TriggerRunQueryTriggerRunsByWorkspaceResponse } from "../models"; + +/** + * Class representing a TriggerRun. + */ +export class TriggerRun { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the TriggerRun class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Rerun single trigger instance by runId. + * @param triggerName The trigger name. + * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + async rerunTriggerInstance( + triggerName: string, + runId: string, + options?: coreHttp.OperationOptions + ): Promise<coreHttp.RestResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-rerunTriggerInstance", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + runId, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + rerunTriggerInstanceOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Cancel single trigger instance by runId. + * @param triggerName The trigger name. + * @param runId The pipeline run identifier. + * @param options The options parameters. + */ + async cancelTriggerInstance( + triggerName: string, + runId: string, + options?: coreHttp.OperationOptions + ): Promise<coreHttp.RestResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-cancelTriggerInstance", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + triggerName, + runId, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + cancelTriggerInstanceOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }
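`rerunTriggerInstance` and `cancelTriggerInstance` address one run of one trigger, keyed by trigger name plus run id, and both resolve to a bare `coreHttp.RestResponse`. A short usage sketch, illustrative only; it assumes the client surfaces this group as `client.triggerRun`, and the identifiers are placeholders:

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";
import { DefaultAzureCredential } from "@azure/identity";

async function cancelRun(triggerName: string, runId: string): Promise<void> {
  const client = new ArtifactsClient(
    new DefaultAzureCredential(),
    "https://myworkspace.dev.azuresynapse.net" // placeholder endpoint
  );
  // Resolves to coreHttp.RestResponse; raw HTTP details live on _response.
  await client.triggerRun.cancelTriggerInstance(triggerName, runId);
}
```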
+ + /** + * Query trigger runs. + * @param filterParameters Parameters to filter the pipeline run. + * @param options The options parameters. + */ + async queryTriggerRunsByWorkspace( + filterParameters: RunFilterParameters, + options?: coreHttp.OperationOptions + ): Promise<TriggerRunQueryTriggerRunsByWorkspaceResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-queryTriggerRunsByWorkspace", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + filterParameters, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + queryTriggerRunsByWorkspaceOperationSpec + ); + return result as TriggerRunQueryTriggerRunsByWorkspaceResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const rerunTriggerInstanceOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/triggerRuns/{runId}/rerun", + httpMethod: "POST", + responses: { + 200: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const cancelTriggerInstanceOperationSpec: coreHttp.OperationSpec = { + path: "/triggers/{triggerName}/triggerRuns/{runId}/cancel", + httpMethod: "POST", + responses: { + 200: {}, + default: { + bodyMapper: Mappers.CloudError + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.runId, Parameters.triggerName], + headerParameters: [Parameters.accept], + serializer +}; +const queryTriggerRunsByWorkspaceOperationSpec: coreHttp.OperationSpec = { + path: "/queryTriggerRuns", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.TriggerRunsQueryResponse + }, + default: { + bodyMapper: Mappers.CloudError + } + }, + requestBody: Parameters.filterParameters, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspace.ts b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts new file mode 100644 index 000000000000..cb9f8133a839 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/workspace.ts @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { WorkspaceGetResponse } from "../models"; + +/** + * Class representing a Workspace. + */ +export class Workspace { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the Workspace class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + } + + /** + * Get Workspace + * @param options The options parameters.
+ */ + async get(options?: coreHttp.OperationOptions): Promise<WorkspaceGetResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-get", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); + return result as WorkspaceGetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getOperationSpec: coreHttp.OperationSpec = { + path: "/workspace", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.Workspace + }, + default: { + bodyMapper: Mappers.ErrorContract + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts new file mode 100644 index 000000000000..7c01377f73ea --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/operations/workspaceGitRepoManagement.ts @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ArtifactsClient } from "../artifactsClient"; +import { + GitHubAccessTokenRequest, + WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams, + WorkspaceGitRepoManagementGetGitHubAccessTokenResponse +} from "../models"; + +/** + * Class representing a WorkspaceGitRepoManagement. + */ +export class WorkspaceGitRepoManagement { + private readonly client: ArtifactsClient; + + /** + * Initialize a new instance of the WorkspaceGitRepoManagement class. + * @param client Reference to the service client + */ + constructor(client: ArtifactsClient) { + this.client = client; + }
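Since `Workspace.get` above wraps a single `GET /workspace` call, reading workspace metadata is a one-liner once a client exists. A sketch under the same assumptions as the earlier examples (the operation group is presumed to be exposed as `client.workspace`, and the field read is based on the usual ARM resource shape):

```ts
import { ArtifactsClient } from "@azure/synapse-artifacts";
import { DefaultAzureCredential } from "@azure/identity";

async function showWorkspace(): Promise<void> {
  const client = new ArtifactsClient(
    new DefaultAzureCredential(),
    "https://myworkspace.dev.azuresynapse.net" // placeholder endpoint
  );
  const workspace = await client.workspace.get();
  console.log(workspace.id); // readonly, server-populated resource id
}
```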
+ + /** + * Get the GitHub access token. + * @param gitHubAccessTokenRequest + * @param options The options parameters. + */ + async getGitHubAccessToken( + gitHubAccessTokenRequest: GitHubAccessTokenRequest, + options?: WorkspaceGitRepoManagementGetGitHubAccessTokenOptionalParams + ): Promise<WorkspaceGitRepoManagementGetGitHubAccessTokenResponse> { + const { span, updatedOptions } = createSpan( + "ArtifactsClient-getGitHubAccessToken", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + gitHubAccessTokenRequest, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getGitHubAccessTokenOperationSpec + ); + return result as WorkspaceGitRepoManagementGetGitHubAccessTokenResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getGitHubAccessTokenOperationSpec: coreHttp.OperationSpec = { + path: "/getGitHubAccessToken", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.GitHubAccessTokenResponse + } + }, + requestBody: Parameters.gitHubAccessTokenRequest, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.contentType, Parameters.clientRequestId], + mediaType: "json", + serializer +}; diff --git a/sdk/synapse/synapse-artifacts/src/tracing.ts b/sdk/synapse/synapse-artifacts/src/tracing.ts new file mode 100644 index 000000000000..36ffdc4f7e26 --- /dev/null +++ b/sdk/synapse/synapse-artifacts/src/tracing.ts @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createSpanFunction } from "@azure/core-http"; + +export const createSpan = createSpanFunction({ + namespace: "Azure.Synapse.Artifacts", + packagePrefix: "Microsoft.Synapse" +}); diff --git a/sdk/synapse/synapse-artifacts/tsconfig.json b/sdk/synapse/synapse-artifacts/tsconfig.json new file mode 100644 index 000000000000..d43efedfc9bc --- /dev/null +++ b/sdk/synapse/synapse-artifacts/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "es6", + "moduleResolution": "node", + "strict": true, + "target": "es5", + "sourceMap": true, + "declarationMap": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "preserveConstEnums": true, + "lib": ["es6", "dom"], + "declaration": true, + "outDir": "./dist-esm", + "importHelpers": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md b/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md new file mode 100644 index 000000000000..106cbca530aa --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0-beta.1 (2020-12-09) + +- Initial release diff --git a/sdk/synapse/synapse-managed-private-endpoints/README.md b/sdk/synapse/synapse-managed-private-endpoints/README.md new file mode 100644 index 000000000000..a20661b376ab --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/README.md @@ -0,0 +1,64 @@ +## Azure Synapse Managed Private Endpoints client library for JavaScript + +This package contains an isomorphic SDK for Managed Private Endpoints.
+ +## Getting started + +### Install the package + +```bash +npm install @azure/synapse-managed-private-endpoints +``` + +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +## Key concepts + +## Examples + +```ts +import { ManagedPrivateEndpointsClient } from "@azure/synapse-managed-private-endpoints"; +import { DefaultAzureCredential } from "@azure/identity"; + +export async function main(): Promise<void> { + const credential = new DefaultAzureCredential(); + + let client = new ManagedPrivateEndpointsClient( + credential, + "https://mysynapse.dev.azuresynapse.net" + ); + let list = await client.managedPrivateEndpoints.list("myvnet"); + for await (let item of list) { + console.log("item:", item); + } +} +``` + +## Related projects + +- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +In the future, you'll find additional code samples here. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png) diff --git a/sdk/synapse/synapse-managed-private-endpoints/api-extractor.json b/sdk/synapse/synapse-managed-private-endpoints/api-extractor.json new file mode 100644 index 000000000000..d9673b6381dd --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/api-extractor.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "mainEntryPointFilePath": "./dist-esm/index.d.ts", + "docModel": { "enabled": true }, + "apiReport": { "enabled": true, "reportFolder": "./review" }, + "dtsRollup": { + "enabled": true, + "untrimmedFilePath": "", + "publicTrimmedFilePath": "./types/synapse-managed-private-endpoints.d.ts" + }, + "messages": { + "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "extractorMessageReporting": { + "ae-missing-release-tag": { "logLevel": "none" }, + "ae-unresolved-link": { "logLevel": "none" } + } + } +} diff --git a/sdk/synapse/synapse-managed-private-endpoints/package.json b/sdk/synapse/synapse-managed-private-endpoints/package.json new file mode 100644 index 000000000000..736332b45096 --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/package.json @@ -0,0 +1,75 @@ +{ + "name": "@azure/synapse-managed-private-endpoints", + "author": "Microsoft Corporation", + "description": "A generated SDK for ManagedPrivateEndpointsClient.", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-managed-private-endpoints/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "sdk-type": "client", + "version": "1.0.0-beta.1", + "dependencies": { + "@azure/core-paging": "^1.1.1", + "@azure/core-http":
"^1.2.0", + "@opentelemetry/api": "^0.10.2", + "tslib": "^2.0.0" + }, + "keywords": [ + "node", + "azure", + "typescript", + "browser", + "cloud", + "isomorphic" + ], + "license": "MIT", + "engine": { + "node": ">=8.0.0" + }, + "engines": { + "node": ">=8.0.0" + }, + "main": "./dist/index.js", + "module": "./dist-esm/src/index.js", + "types": "./types/synapse-managed-private-endpoints.d.ts", + "devDependencies": { + "typescript": "~3.9.3", + "eslint": "^6.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "rollup": "^1.16.3", + "rollup-plugin-node-resolve": "^3.4.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "@rollup/plugin-commonjs": "11.0.2", + "uglify-js": "^3.4.9", + "@microsoft/api-extractor": "7.7.11" + }, + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "esm/**/*.js", + "esm/**/*.js.map", + "esm/**/*.d.ts", + "esm/**/*.d.ts.map", + "types/synapse-managed-private-endpoints.d.ts", + "README.md", + "rollup.config.js", + "tsconfig.json" + ], + "scripts": { + "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", + "test": "echo skip", + "build:test": "echo skip", + "pack": "npm pack 2>&1", + "lint": "echo skipped", + "unit-test:browser": "echo skipped", + "unit-test:node": "echo skipped", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "extract-api": "api-extractor run --local" + }, + "sideEffects": false, + "autoPublish": true +} diff --git a/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-private-endpoints.api.md b/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-private-endpoints.api.md new file mode 100644 index 000000000000..cdfedb1dd3f1 --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/review/synapse-managed-private-endpoints.api.md @@ -0,0 +1,106 @@ +## API Report File for "@azure/synapse-managed-private-endpoints" + +> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). 
+ +```ts + +import * as coreHttp from '@azure/core-http'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; + +// @public +export interface ManagedPrivateEndpoint { + readonly id?: string; + readonly name?: string; + properties?: ManagedPrivateEndpointProperties; + readonly type?: string; +} + +// @public +export interface ManagedPrivateEndpointConnectionState { + actionsRequired?: string; + description?: string; + readonly status?: string; +} + +// @public +export interface ManagedPrivateEndpointListResponse { + readonly nextLink?: string; + value?: ManagedPrivateEndpoint[]; +} + +// @public +export interface ManagedPrivateEndpointProperties { + connectionState?: ManagedPrivateEndpointConnectionState; + groupId?: string; + readonly isReserved?: boolean; + privateLinkResourceId?: string; + readonly provisioningState?: string; +} + +// @public (undocumented) +export class ManagedPrivateEndpointsClient extends ManagedPrivateEndpointsClientContext { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ManagedPrivateEndpointsClientOptionalParams); + // (undocumented) + managedPrivateEndpoints: ManagedPrivateEndpointsOperation; +} + +// @public (undocumented) +export class ManagedPrivateEndpointsClientContext extends coreHttp.ServiceClient { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: ManagedPrivateEndpointsClientOptionalParams); + // (undocumented) + apiVersion: string; + // (undocumented) + endpoint: string; +} + +// @public +export interface ManagedPrivateEndpointsClientOptionalParams extends coreHttp.ServiceClientOptions { + apiVersion?: string; + endpoint?: string; +} + +// @public +export type ManagedPrivateEndpointsCreateResponse = ManagedPrivateEndpoint & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: ManagedPrivateEndpoint; + }; +}; + +// @public +export type ManagedPrivateEndpointsGetResponse = ManagedPrivateEndpoint & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: ManagedPrivateEndpoint; + }; +}; + +// @public +export type ManagedPrivateEndpointsListNextResponse = ManagedPrivateEndpointListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: ManagedPrivateEndpointListResponse; + }; +}; + +// @public +export type ManagedPrivateEndpointsListResponse = ManagedPrivateEndpointListResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: ManagedPrivateEndpointListResponse; + }; +}; + +// @public +export class ManagedPrivateEndpointsOperation { + constructor(client: ManagedPrivateEndpointsClient); + create(managedVirtualNetworkName: string, managedPrivateEndpointName: string, managedPrivateEndpoint: ManagedPrivateEndpoint, options?: coreHttp.OperationOptions): Promise<ManagedPrivateEndpointsCreateResponse>; + delete(managedVirtualNetworkName: string, managedPrivateEndpointName: string, options?: coreHttp.OperationOptions): Promise<coreHttp.RestResponse>; + get(managedVirtualNetworkName: string, managedPrivateEndpointName: string, options?: coreHttp.OperationOptions): Promise<ManagedPrivateEndpointsGetResponse>; + list(managedVirtualNetworkName: string, options?: coreHttp.OperationOptions): PagedAsyncIterableIterator<ManagedPrivateEndpoint>; + } + + +// (No @packageDocumentation comment for this package) + +```
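Reading this surface, a create-then-inspect flow would look roughly as follows. This is an illustrative sketch, not part of the report: the endpoint, network name, private-endpoint name, and the target resource id are all placeholders:

```ts
import { ManagedPrivateEndpointsClient } from "@azure/synapse-managed-private-endpoints";
import { DefaultAzureCredential } from "@azure/identity";

async function createAndInspect(): Promise<void> {
  const client = new ManagedPrivateEndpointsClient(
    new DefaultAzureCredential(),
    "https://mysynapse.dev.azuresynapse.net" // placeholder endpoint
  );
  const created = await client.managedPrivateEndpoints.create("myvnet", "myendpoint", {
    properties: {
      // Placeholder ARM id of the resource the endpoint should point at
      privateLinkResourceId:
        "/subscriptions/0000/resourceGroups/rg/providers/Microsoft.Storage/storageAccounts/myaccount",
      groupId: "blob"
    }
  });
  // provisioningState is readonly and populated by the service
  console.log(created.properties?.provisioningState);
}
```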
diff --git a/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js new file mode 100644 index 000000000000..938c844ba214 --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/rollup.config.js @@ -0,0 +1,41 @@ +import rollup from "rollup"; +import nodeResolve from "rollup-plugin-node-resolve"; +import sourcemaps from "rollup-plugin-sourcemaps"; +import cjs from "@rollup/plugin-commonjs"; + +/** + * @type {rollup.RollupFileOptions} + */ +const config = { + input: "./dist-esm/managedPrivateEndpointsClient.js", + external: ["@azure/core-http", "@azure/core-arm"], + output: { + file: "./dist/index.js", + format: "cjs", + name: "Azure.SynapseManagedPrivateEndpoints", + sourcemap: true, + globals: { + "@azure/core-http": "coreHttp", + "@azure/core-arm": "coreArm" + }, + banner: `/* + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ ` + }, + plugins: [ + nodeResolve({ module: true }), + sourcemaps(), + cjs({ + namedExports: { + assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], + "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"] + } + }) + ] +}; + +export default config; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/index.ts new file mode 100644 index 000000000000..cd130f2af113 --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/index.ts @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/// <reference lib="esnext.asynciterable" /> +export * from "./models"; +export { ManagedPrivateEndpointsClient } from "./managedPrivateEndpointsClient"; +export { ManagedPrivateEndpointsClientContext } from "./managedPrivateEndpointsClientContext"; +export { ManagedPrivateEndpoints as ManagedPrivateEndpointsOperation } from "./operations"; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClient.ts b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClient.ts new file mode 100644 index 000000000000..a473d2ac17f3 --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClient.ts @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; +import { ManagedPrivateEndpoints } from "./operations"; +import { ManagedPrivateEndpointsClientContext } from "./managedPrivateEndpointsClientContext"; +import { ManagedPrivateEndpointsClientOptionalParams } from "./models"; + +export class ManagedPrivateEndpointsClient extends ManagedPrivateEndpointsClientContext { + /** + * Initializes a new instance of the ManagedPrivateEndpointsClient class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net.
+ * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: ManagedPrivateEndpointsClientOptionalParams + ) { + super(credentials, endpoint, options); + this.managedPrivateEndpoints = new ManagedPrivateEndpoints(this); + } + + managedPrivateEndpoints: ManagedPrivateEndpoints; +} diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts new file mode 100644 index 000000000000..fa65d3792b4c --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/managedPrivateEndpointsClientContext.ts @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; +import { ManagedPrivateEndpointsClientOptionalParams } from "./models"; + +const packageName = "@azure/synapse-managed-private-endpoints"; +const packageVersion = "1.0.0"; + +export class ManagedPrivateEndpointsClientContext extends coreHttp.ServiceClient { + endpoint: string; + apiVersion: string; + + /** + * Initializes a new instance of the ManagedPrivateEndpointsClientContext class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: ManagedPrivateEndpointsClientOptionalParams + ) { + if (credentials === undefined) { + throw new Error("'credentials' cannot be null"); + } + if (endpoint === undefined) { + throw new Error("'endpoint' cannot be null"); + } + + // Initializing default values for options + if (!options) { + options = {}; + } + + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + + if (!options.credentialScopes) { + options.credentialScopes = ["https://dev.azuresynapse.net/.default"]; + } + + super(credentials, options); + + this.requestContentType = "application/json; charset=utf-8"; + + this.baseUri = options.endpoint || "{endpoint}"; + + // Parameter assignments + this.endpoint = endpoint; + + // Assigning values to Constant parameters + this.apiVersion = options.apiVersion || "2019-06-01-preview"; + } +} diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts new file mode 100644 index 000000000000..b7142b7e34f9 --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/models/index.ts @@ -0,0 +1,184 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; + +/** + * Managed private endpoint + */ +export interface ManagedPrivateEndpoint { + /** + * Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly id?: string; + /** + * The name of the resource + * NOTE: This property will not be serialized. It can only be populated by the server. 
+ */ + readonly name?: string; + /** + * The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly type?: string; + /** + * Managed private endpoint properties + */ + properties?: ManagedPrivateEndpointProperties; +} + +/** + * Properties of a managed private endpoint + */ +export interface ManagedPrivateEndpointProperties { + /** + * The ARM resource ID of the resource to which the managed private endpoint is created + */ + privateLinkResourceId?: string; + /** + * The groupId to which the managed private endpoint is created + */ + groupId?: string; + /** + * The managed private endpoint provisioning state + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly provisioningState?: string; + /** + * The managed private endpoint connection state + */ + connectionState?: ManagedPrivateEndpointConnectionState; + /** + * Denotes whether the managed private endpoint is reserved + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly isReserved?: boolean; +} + +/** + * The connection state of a managed private endpoint + */ +export interface ManagedPrivateEndpointConnectionState { + /** + * The approval status + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly status?: string; + /** + * The managed private endpoint description + */ + description?: string; + /** + * The actions required on the managed private endpoint + */ + actionsRequired?: string; +} + +/** + * A list of managed private endpoints + */ +export interface ManagedPrivateEndpointListResponse { + /** + * List of managed private endpoints + */ + value?: ManagedPrivateEndpoint[]; + /** + * The link to the next page of results, if any remaining results exist. + * NOTE: This property will not be serialized. It can only be populated by the server. + */ + readonly nextLink?: string; +} + +/** + * Contains response data for the get operation. + */ +export type ManagedPrivateEndpointsGetResponse = ManagedPrivateEndpoint & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: ManagedPrivateEndpoint; + }; +}; + +/** + * Contains response data for the create operation. + */ +export type ManagedPrivateEndpointsCreateResponse = ManagedPrivateEndpoint & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: ManagedPrivateEndpoint; + }; +}; + +/** + * Contains response data for the list operation. + */ +export type ManagedPrivateEndpointsListResponse = ManagedPrivateEndpointListResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: ManagedPrivateEndpointListResponse; + }; +}; + +/** + * Contains response data for the listNext operation. + */ +export type ManagedPrivateEndpointsListNextResponse = ManagedPrivateEndpointListResponse & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: ManagedPrivateEndpointListResponse; + }; +}; + +/** + * Optional parameters. + */ +export interface ManagedPrivateEndpointsClientOptionalParams extends coreHttp.ServiceClientOptions { + /** + * Api Version + */ + apiVersion?: string; + /** + * Overrides client endpoint. + */ + endpoint?: string; +} diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/models/mappers.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/mappers.ts new file mode 100644 index 000000000000..93fb506b8311 --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/models/mappers.ts @@ -0,0 +1,138 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; + +export const ManagedPrivateEndpoint: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedPrivateEndpoint", + modelProperties: { + id: { + serializedName: "id", + readOnly: true, + type: { + name: "String" + } + }, + name: { + serializedName: "name", + readOnly: true, + type: { + name: "String" + } + }, + type: { + serializedName: "type", + readOnly: true, + type: { + name: "String" + } + }, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "ManagedPrivateEndpointProperties" + } + } + } + } +}; + +export const ManagedPrivateEndpointProperties: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedPrivateEndpointProperties", + modelProperties: { + privateLinkResourceId: { + serializedName: "privateLinkResourceId", + type: { + name: "String" + } + }, + groupId: { + serializedName: "groupId", + type: { + name: "String" + } + }, + provisioningState: { + serializedName: "provisioningState", + readOnly: true, + type: { + name: "String" + } + }, + connectionState: { + serializedName: "connectionState", + type: { + name: "Composite", + className: "ManagedPrivateEndpointConnectionState" + } + }, + isReserved: { + serializedName: "isReserved", + readOnly: true, + type: { + name: "Boolean" + } + } + } + } +}; + +export const ManagedPrivateEndpointConnectionState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedPrivateEndpointConnectionState", + modelProperties: { + status: { + serializedName: "status", + readOnly: true, + type: { + name: "String" + } + }, + description: { + serializedName: "description", + type: { + name: "String" + } + }, + actionsRequired: { + serializedName: "actionsRequired", + type: { + name: "String" + } + } + } + } +}; + +export const ManagedPrivateEndpointListResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "ManagedPrivateEndpointListResponse", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ManagedPrivateEndpoint" + } + } + } + }, + nextLink: { + serializedName: "nextLink", + readOnly: true, + type: { + name: "String" + } + } + } + } +}; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/models/parameters.ts b/sdk/synapse/synapse-managed-private-endpoints/src/models/parameters.ts new file mode 100644 index 000000000000..08ddd10470ab --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/models/parameters.ts @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import { + OperationParameter, + OperationURLParameter, + OperationQueryParameter +} from "@azure/core-http"; +import { ManagedPrivateEndpoint as ManagedPrivateEndpointMapper } from "../models/mappers"; + +export const accept: OperationParameter = { + parameterPath: "accept", + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; + +export const endpoint: OperationURLParameter = { + parameterPath: "endpoint", + mapper: { + serializedName: "endpoint", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const apiVersion: OperationQueryParameter = { + parameterPath: "apiVersion", + mapper: { + defaultValue: "2019-06-01-preview", + isConstant: true, + serializedName: "api-version", + type: { + name: "String" + } + } +}; + +export const managedVirtualNetworkName: OperationURLParameter = { + parameterPath: "managedVirtualNetworkName", + mapper: { + serializedName: "managedVirtualNetworkName", + required: true, + type: { + name: "String" + } + } +}; + +export const managedPrivateEndpointName: OperationURLParameter = { + parameterPath: "managedPrivateEndpointName", + mapper: { + serializedName: "managedPrivateEndpointName", + required: true, + type: { + name: "String" + } + } +}; + +export const contentType: OperationParameter = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; + +export const managedPrivateEndpoint: OperationParameter = { + parameterPath: "managedPrivateEndpoint", + mapper: ManagedPrivateEndpointMapper +}; + +export const nextLink: OperationURLParameter = { + parameterPath: "nextLink", + mapper: { + serializedName: "nextLink", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/operations/index.ts b/sdk/synapse/synapse-managed-private-endpoints/src/operations/index.ts new file mode 100644 index 000000000000..ca1a4a2ff79d --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/operations/index.ts @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export * from "./managedPrivateEndpoints"; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts new file mode 100644 index 000000000000..818a7ad579ad --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/operations/managedPrivateEndpoints.ts @@ -0,0 +1,337 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import { PagedAsyncIterableIterator } from "@azure/core-paging"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { ManagedPrivateEndpointsClient } from "../managedPrivateEndpointsClient"; +import { + ManagedPrivateEndpoint, + ManagedPrivateEndpointsGetResponse, + ManagedPrivateEndpointsCreateResponse, + ManagedPrivateEndpointsListResponse, + ManagedPrivateEndpointsListNextResponse +} from "../models"; + +/** + * Class representing a ManagedPrivateEndpoints. 
+ */ +export class ManagedPrivateEndpoints { + private readonly client: ManagedPrivateEndpointsClient; + + /** + * Initialize a new instance of the ManagedPrivateEndpoints class. + * @param client Reference to the service client + */ + constructor(client: ManagedPrivateEndpointsClient) { + this.client = client; + } + + /** + * List Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param options The options parameters. + */ + public list( + managedVirtualNetworkName: string, + options?: coreHttp.OperationOptions + ): PagedAsyncIterableIterator<ManagedPrivateEndpoint> { + const iter = this.listPagingAll(managedVirtualNetworkName, options); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: () => { + return this.listPagingPage(managedVirtualNetworkName, options); + } + }; + } + + private async *listPagingPage( + managedVirtualNetworkName: string, + options?: coreHttp.OperationOptions + ): AsyncIterableIterator<ManagedPrivateEndpoint[]> { + let result = await this._list(managedVirtualNetworkName, options); + yield result.value || []; + let continuationToken = result.nextLink; + while (continuationToken) { + result = await this._listNext(managedVirtualNetworkName, continuationToken, options); + continuationToken = result.nextLink; + yield result.value || []; + } + } + + private async *listPagingAll( + managedVirtualNetworkName: string, + options?: coreHttp.OperationOptions + ): AsyncIterableIterator<ManagedPrivateEndpoint> { + for await (const page of this.listPagingPage(managedVirtualNetworkName, options)) { + yield* page; + } + } + + /** + * Get Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param managedPrivateEndpointName Managed private endpoint name + * @param options The options parameters. + */ + async get( + managedVirtualNetworkName: string, + managedPrivateEndpointName: string, + options?: coreHttp.OperationOptions + ): Promise<ManagedPrivateEndpointsGetResponse> { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-get", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + managedPrivateEndpointName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest(operationArguments, getOperationSpec); + return result as ManagedPrivateEndpointsGetResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + }
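`list` layers the paging protocol over the private `_list`/`_listNext` calls: `next()` walks items one at a time while `byPage()` yields whole pages and follows `nextLink` between them. Both consumption patterns, as a sketch (credential, endpoint, and network name are placeholders):

```ts
import { ManagedPrivateEndpointsClient } from "@azure/synapse-managed-private-endpoints";
import { DefaultAzureCredential } from "@azure/identity";

async function listEndpoints(): Promise<void> {
  const client = new ManagedPrivateEndpointsClient(
    new DefaultAzureCredential(),
    "https://mysynapse.dev.azuresynapse.net" // placeholder endpoint
  );
  // Item-by-item: the iterator transparently follows nextLink.
  for await (const endpoint of client.managedPrivateEndpoints.list("myvnet")) {
    console.log(endpoint.name);
  }
  // Page-by-page: each yielded value is an array of endpoints.
  for await (const page of client.managedPrivateEndpoints.list("myvnet").byPage()) {
    console.log(`page with ${page.length} endpoints`);
  }
}
```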
+ + /** + * Create Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param managedPrivateEndpointName Managed private endpoint name + * @param managedPrivateEndpoint Managed private endpoint properties. + * @param options The options parameters. + */ + async create( + managedVirtualNetworkName: string, + managedPrivateEndpointName: string, + managedPrivateEndpoint: ManagedPrivateEndpoint, + options?: coreHttp.OperationOptions + ): Promise<ManagedPrivateEndpointsCreateResponse> { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-create", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + managedPrivateEndpointName, + managedPrivateEndpoint, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + createOperationSpec + ); + return result as ManagedPrivateEndpointsCreateResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Delete Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param managedPrivateEndpointName Managed private endpoint name + * @param options The options parameters. + */ + async delete( + managedVirtualNetworkName: string, + managedPrivateEndpointName: string, + options?: coreHttp.OperationOptions + ): Promise<coreHttp.RestResponse> { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-delete", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + managedPrivateEndpointName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + deleteOperationSpec + ); + return result as coreHttp.RestResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * List Managed Private Endpoints + * @param managedVirtualNetworkName Managed virtual network name + * @param options The options parameters. + */ + private async _list( + managedVirtualNetworkName: string, + options?: coreHttp.OperationOptions + ): Promise<ManagedPrivateEndpointsListResponse> { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-_list", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest(operationArguments, listOperationSpec); + return result as ManagedPrivateEndpointsListResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * ListNext + * @param managedVirtualNetworkName Managed virtual network name + * @param nextLink The nextLink from the previous successful call to the List method. + * @param options The options parameters.
+ */ + private async _listNext( + managedVirtualNetworkName: string, + nextLink: string, + options?: coreHttp.OperationOptions + ): Promise<ManagedPrivateEndpointsListNextResponse> { + const { span, updatedOptions } = createSpan( + "ManagedPrivateEndpointsClient-_listNext", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + managedVirtualNetworkName, + nextLink, + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + listNextOperationSpec + ); + return result as ManagedPrivateEndpointsListNextResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getOperationSpec: coreHttp.OperationSpec = { + path: + "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ManagedPrivateEndpoint + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.managedVirtualNetworkName, + Parameters.managedPrivateEndpointName + ], + headerParameters: [Parameters.accept], + serializer +}; +const createOperationSpec: coreHttp.OperationSpec = { + path: + "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}", + httpMethod: "PUT", + responses: { + 200: { + bodyMapper: Mappers.ManagedPrivateEndpoint + } + }, + requestBody: Parameters.managedPrivateEndpoint, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.managedVirtualNetworkName, + Parameters.managedPrivateEndpointName + ], + headerParameters: [Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const deleteOperationSpec: coreHttp.OperationSpec = { + path: + "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints/{managedPrivateEndpointName}", + httpMethod: "DELETE", + responses: { 202: {}, 204: {} }, + queryParameters: [Parameters.apiVersion], + urlParameters: [ + Parameters.endpoint, + Parameters.managedVirtualNetworkName, + Parameters.managedPrivateEndpointName + ], + serializer +}; +const listOperationSpec: coreHttp.OperationSpec = { + path: "/managedVirtualNetworks/{managedVirtualNetworkName}/managedPrivateEndpoints", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ManagedPrivateEndpointListResponse + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.managedVirtualNetworkName], + headerParameters: [Parameters.accept], + serializer +}; +const listNextOperationSpec: coreHttp.OperationSpec = { + path: "{nextLink}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ManagedPrivateEndpointListResponse + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint, Parameters.managedVirtualNetworkName, Parameters.nextLink], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts b/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts new file mode 100644 index 000000000000..ec5a2da97e89 --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/src/tracing.ts @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license. +import { createSpanFunction } from "@azure/core-http"; + +export const createSpan = createSpanFunction({ + namespace: "Azure.Synapse.ManagedPrivateEndpoints", + packagePrefix: "Microsoft.Synapse" +}); diff --git a/sdk/synapse/synapse-managed-private-endpoints/tsconfig.json b/sdk/synapse/synapse-managed-private-endpoints/tsconfig.json new file mode 100644 index 000000000000..d43efedfc9bc --- /dev/null +++ b/sdk/synapse/synapse-managed-private-endpoints/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "es6", + "moduleResolution": "node", + "strict": true, + "target": "es5", + "sourceMap": true, + "declarationMap": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "preserveConstEnums": true, + "lib": ["es6", "dom"], + "declaration": true, + "outDir": "./dist-esm", + "importHelpers": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/sdk/synapse/synapse-monitoring/CHANGELOG.md b/sdk/synapse/synapse-monitoring/CHANGELOG.md new file mode 100644 index 000000000000..106cbca530aa --- /dev/null +++ b/sdk/synapse/synapse-monitoring/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0-beta.1 (2020-12-09) + +- Initial release diff --git a/sdk/synapse/synapse-monitoring/README.md b/sdk/synapse/synapse-monitoring/README.md new file mode 100644 index 000000000000..6a3b8cc78e54 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/README.md @@ -0,0 +1,59 @@ +## Azure Synapse Monitoring client library for JavaScript + +This package contains an isomorphic SDK for Monitoring. + +## Getting started + +### Install the package + +```bash +npm install @azure/synapse-monitoring +``` + +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +## Key concepts + +## Examples + +```ts +import { MonitoringClient } from "@azure/synapse-monitoring"; +import { DefaultAzureCredential } from "@azure/identity"; + +export async function main(): Promise<void> { + const credential = new DefaultAzureCredential(); + + let client = new MonitoringClient(credential, "https://mysynapse.dev.azuresynapse.net"); + let output = await client.monitoring.getSparkJobList(); + console.log("output:", output); +} +``` + +## Related projects + +- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +In the future, you'll find additional code samples here. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code.
+ +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png) diff --git a/sdk/synapse/synapse-monitoring/api-extractor.json b/sdk/synapse/synapse-monitoring/api-extractor.json new file mode 100644 index 000000000000..7433f42f5c3f --- /dev/null +++ b/sdk/synapse/synapse-monitoring/api-extractor.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "mainEntryPointFilePath": "./dist-esm/index.d.ts", + "docModel": { "enabled": true }, + "apiReport": { "enabled": true, "reportFolder": "./review" }, + "dtsRollup": { + "enabled": true, + "untrimmedFilePath": "", + "publicTrimmedFilePath": "./types/synapse-monitoring.d.ts" + }, + "messages": { + "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "extractorMessageReporting": { + "ae-missing-release-tag": { "logLevel": "none" }, + "ae-unresolved-link": { "logLevel": "none" } + } + } +} diff --git a/sdk/synapse/synapse-monitoring/package.json b/sdk/synapse/synapse-monitoring/package.json new file mode 100644 index 000000000000..ec9d2bdf49e4 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/package.json @@ -0,0 +1,74 @@ +{ + "name": "@azure/synapse-monitoring", + "author": "Microsoft Corporation", + "description": "A generated SDK for MonitoringClient.", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-monitoring/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "sdk-type": "client", + "version": "1.0.0-beta.1", + "dependencies": { + "@azure/core-http": "^1.2.0", + "@opentelemetry/api": "^0.10.2", + "tslib": "^2.0.0" + }, + "keywords": [ + "node", + "azure", + "typescript", + "browser", + "cloud", + "isomorphic" + ], + "license": "MIT", + "engine": { + "node": ">=8.0.0" + }, + "engines": { + "node": ">=8.0.0" + }, + "main": "./dist/index.js", + "module": "./dist-esm/src/index.js", + "types": "./types/synapse-monitoring.d.ts", + "devDependencies": { + "typescript": "~3.9.3", + "eslint": "^6.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "rollup": "^1.16.3", + "rollup-plugin-node-resolve": "^3.4.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "@rollup/plugin-commonjs": "11.0.2", + "uglify-js": "^3.4.9", + "@microsoft/api-extractor": "7.7.11" + }, + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "esm/**/*.js", + "esm/**/*.js.map", + "esm/**/*.d.ts", + "esm/**/*.d.ts.map", + "types/synapse-monitoring.d.ts", + "README.md", + "rollup.config.js", + "tsconfig.json" + ], + "scripts": { + "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", + "pack": "npm pack 2>&1", + "build:test": "echo skipped", + "lint": "echo skipped", + "unit-test:browser": "echo skipped", + "unit-test:node": "echo skipped", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "test": "echo skipped", + "extract-api": "api-extractor run --local" + }, + "sideEffects": false, + "autoPublish": true +} diff --git a/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md b/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md new file mode 100644 index 000000000000..316fee8ccc20 --- /dev/null +++ 
b/sdk/synapse/synapse-monitoring/review/synapse-monitoring.api.md @@ -0,0 +1,121 @@ +## API Report File for "@azure/synapse-monitoring" + +> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). + +```ts + +import * as coreHttp from '@azure/core-http'; + +// @public (undocumented) +export class MonitoringClient extends MonitoringClientContext { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: MonitoringClientOptionalParams); + // (undocumented) + monitoring: MonitoringOperation; +} + +// @public (undocumented) +export class MonitoringClientContext extends coreHttp.ServiceClient { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, options?: MonitoringClientOptionalParams); + // (undocumented) + apiVersion: string; + // (undocumented) + endpoint: string; +} + +// @public +export interface MonitoringClientOptionalParams extends coreHttp.ServiceClientOptions { + apiVersion?: string; + endpoint?: string; +} + +// @public +export interface MonitoringGetSparkJobListOptionalParams extends coreHttp.OperationOptions { + xMsClientRequestId?: string; +} + +// @public +export type MonitoringGetSparkJobListResponse = SparkJobListViewResponse & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkJobListViewResponse; + }; +}; + +// @public +export interface MonitoringGetSqlJobQueryStringOptionalParams extends coreHttp.OperationOptions { + // (undocumented) + filter?: string; + // (undocumented) + orderby?: string; + // (undocumented) + skip?: string; + xMsClientRequestId?: string; +} + +// @public +export type MonitoringGetSqlJobQueryStringResponse = SqlQueryStringDataModel & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SqlQueryStringDataModel; + }; +}; + +// @public +export class MonitoringOperation { + constructor(client: MonitoringClient); + getSparkJobList(options?: MonitoringGetSparkJobListOptionalParams): Promise<MonitoringGetSparkJobListResponse>; + getSqlJobQueryString(options?: MonitoringGetSqlJobQueryStringOptionalParams): Promise<MonitoringGetSqlJobQueryStringResponse>; +} + +// @public (undocumented) +export interface SparkJob { + // (undocumented) + compute?: string; + // (undocumented) + endTime?: Date; + // (undocumented) + jobType?: string; + // (undocumented) + livyId?: string; + // (undocumented) + name?: string; + // (undocumented) + pipeline?: SparkJob[] | null; + // (undocumented) + queuedDuration?: string; + // (undocumented) + runningDuration?: string; + // (undocumented) + sparkApplicationId?: string; + // (undocumented) + sparkJobDefinition?: string | null; + // (undocumented) + state?: string; + // (undocumented) + submitter?: string; + // (undocumented) + submitTime?: Date; + // (undocumented) + timing?: string[]; + // (undocumented) + totalDuration?: string; +} + +// @public (undocumented) +export interface SparkJobListViewResponse { + // (undocumented) + nJobs?: number; + // (undocumented) + sparkJobs?: SparkJob[]; +} + +// @public (undocumented) +export interface SqlQueryStringDataModel { + // (undocumented) + query?: string; +} + + +// (No @packageDocumentation comment for this package) + +``` diff --git a/sdk/synapse/synapse-monitoring/rollup.config.js b/sdk/synapse/synapse-monitoring/rollup.config.js new file mode 100644 index 000000000000..4eb6764521d6 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/rollup.config.js @@ -0,0 +1,41 @@ +import rollup from "rollup"; +import nodeResolve from "rollup-plugin-node-resolve"; 
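+// rollup-plugin-node-resolve resolves bare module imports from node_modules while bundling; note that this plugin has since been deprecated upstream in favor of @rollup/plugin-node-resolve, but the generated config still references the old package name.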
+import sourcemaps from "rollup-plugin-sourcemaps"; +import cjs from "@rollup/plugin-commonjs"; + +/** + * @type {rollup.RollupFileOptions} + */ +const config = { + input: "./dist-esm/monitoringClient.js", + external: ["@azure/core-http", "@azure/core-arm"], + output: { + file: "./dist/index.js", + format: "cjs", + name: "Azure.SynapseMonitoring", + sourcemap: true, + globals: { + "@azure/core-http": "coreHttp", + "@azure/core-arm": "coreArm" + }, + banner: `/* + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ ` + }, + plugins: [ + nodeResolve({ module: true }), + sourcemaps(), + cjs({ + namedExports: { + assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], + "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"] + } + }) + ] +}; + +export default config; diff --git a/sdk/synapse/synapse-monitoring/src/index.ts b/sdk/synapse/synapse-monitoring/src/index.ts new file mode 100644 index 000000000000..36a55288d039 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/index.ts @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export * from "./models"; +export { MonitoringClient } from "./monitoringClient"; +export { MonitoringClientContext } from "./monitoringClientContext"; +export { Monitoring as MonitoringOperation } from "./operations"; diff --git a/sdk/synapse/synapse-monitoring/src/models/index.ts b/sdk/synapse/synapse-monitoring/src/models/index.ts new file mode 100644 index 000000000000..447c3a9d5ba4 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/models/index.ts @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; + +export interface SparkJobListViewResponse { + nJobs?: number; + sparkJobs?: SparkJob[]; +} + +export interface SparkJob { + state?: string; + name?: string; + submitter?: string; + compute?: string; + sparkApplicationId?: string; + livyId?: string; + timing?: string[]; + sparkJobDefinition?: string | null; + pipeline?: SparkJob[] | null; + jobType?: string; + submitTime?: Date; + endTime?: Date; + queuedDuration?: string; + runningDuration?: string; + totalDuration?: string; +} + +export interface SqlQueryStringDataModel { + query?: string; +} + +/** + * Optional parameters. + */ +export interface MonitoringGetSparkJobListOptionalParams extends coreHttp.OperationOptions { + /** + * Can provide a guid, which is helpful for debugging and to provide better customer support + */ + xMsClientRequestId?: string; +} + +/** + * Contains response data for the getSparkJobList operation. + */ +export type MonitoringGetSparkJobListResponse = SparkJobListViewResponse & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkJobListViewResponse; + }; +}; + +/** + * Optional parameters. 
+ */ +export interface MonitoringGetSqlJobQueryStringOptionalParams extends coreHttp.OperationOptions { + /** + * Can provide a guid, which is helpful for debugging and to provide better customer support + */ + xMsClientRequestId?: string; + filter?: string; + orderby?: string; + skip?: string; +} + +/** + * Contains response data for the getSqlJobQueryString operation. + */ +export type MonitoringGetSqlJobQueryStringResponse = SqlQueryStringDataModel & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SqlQueryStringDataModel; + }; +}; + +/** + * Optional parameters. + */ +export interface MonitoringClientOptionalParams extends coreHttp.ServiceClientOptions { + /** + * Api Version + */ + apiVersion?: string; + /** + * Overrides client endpoint. + */ + endpoint?: string; +} diff --git a/sdk/synapse/synapse-monitoring/src/models/mappers.ts b/sdk/synapse/synapse-monitoring/src/models/mappers.ts new file mode 100644 index 000000000000..230f5eeead71 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/models/mappers.ts @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; + +export const SparkJobListViewResponse: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJobListViewResponse", + modelProperties: { + nJobs: { + serializedName: "nJobs", + type: { + name: "Number" + } + }, + sparkJobs: { + serializedName: "sparkJobs", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkJob" + } + } + } + } + } + } +}; + +export const SparkJob: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkJob", + modelProperties: { + state: { + serializedName: "state", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + submitter: { + serializedName: "submitter", + type: { + name: "String" + } + }, + compute: { + serializedName: "compute", + type: { + name: "String" + } + }, + sparkApplicationId: { + serializedName: "sparkApplicationId", + type: { + name: "String" + } + }, + livyId: { + serializedName: "livyId", + type: { + name: "String" + } + }, + timing: { + serializedName: "timing", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + sparkJobDefinition: { + serializedName: "sparkJobDefinition", + nullable: true, + type: { + name: "String" + } + }, + pipeline: { + serializedName: "pipeline", + nullable: true, + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkJob" + } + } + } + }, + jobType: { + serializedName: "jobType", + type: { + name: "String" + } + }, + submitTime: { + serializedName: "submitTime", + type: { + name: "DateTime" + } + }, + endTime: { + serializedName: "endTime", + type: { + name: "DateTime" + } + }, + queuedDuration: { + serializedName: "queuedDuration", + type: { + name: "String" + } + }, + runningDuration: { + serializedName: "runningDuration", + type: { + name: "String" + } + }, + totalDuration: { + serializedName: "totalDuration", + type: { + name: "String" + } + } + } + } +}; + +export const SqlQueryStringDataModel: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SqlQueryStringDataModel", + modelProperties: { + query: { + serializedName: "query", + type: { + name: 
"String" + } + } + } + } +}; diff --git a/sdk/synapse/synapse-monitoring/src/models/parameters.ts b/sdk/synapse/synapse-monitoring/src/models/parameters.ts new file mode 100644 index 000000000000..299a2a659930 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/models/parameters.ts @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { + OperationParameter, + OperationURLParameter, + OperationQueryParameter +} from "@azure/core-http"; + +export const accept: OperationParameter = { + parameterPath: "accept", + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; + +export const endpoint: OperationURLParameter = { + parameterPath: "endpoint", + mapper: { + serializedName: "endpoint", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const xMsClientRequestId: OperationParameter = { + parameterPath: ["options", "xMsClientRequestId"], + mapper: { + serializedName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; + +export const apiVersion: OperationQueryParameter = { + parameterPath: "apiVersion", + mapper: { + defaultValue: "2019-11-01-preview", + isConstant: true, + serializedName: "api-version", + type: { + name: "String" + } + } +}; + +export const filter: OperationQueryParameter = { + parameterPath: ["options", "filter"], + mapper: { + serializedName: "filter", + type: { + name: "String" + } + } +}; + +export const orderby: OperationQueryParameter = { + parameterPath: ["options", "orderby"], + mapper: { + serializedName: "$orderby", + type: { + name: "String" + } + } +}; + +export const skip: OperationQueryParameter = { + parameterPath: ["options", "skip"], + mapper: { + serializedName: "skip", + type: { + name: "String" + } + } +}; diff --git a/sdk/synapse/synapse-monitoring/src/monitoringClient.ts b/sdk/synapse/synapse-monitoring/src/monitoringClient.ts new file mode 100644 index 000000000000..c1e74c64585d --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/monitoringClient.ts @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; +import { Monitoring } from "./operations"; +import { MonitoringClientContext } from "./monitoringClientContext"; +import { MonitoringClientOptionalParams } from "./models"; + +export class MonitoringClient extends MonitoringClientContext { + /** + * Initializes a new instance of the MonitoringClient class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: MonitoringClientOptionalParams + ) { + super(credentials, endpoint, options); + this.monitoring = new Monitoring(this); + } + + monitoring: Monitoring; +} diff --git a/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts b/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts new file mode 100644 index 000000000000..09fe6641e0d1 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/monitoringClientContext.ts @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import * as coreHttp from "@azure/core-http"; +import { MonitoringClientOptionalParams } from "./models"; + +const packageName = "@azure/synapse-monitoring"; +const packageVersion = "1.0.0"; + +export class MonitoringClientContext extends coreHttp.ServiceClient { + endpoint: string; + apiVersion: string; + + /** + * Initializes a new instance of the MonitoringClientContext class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + options?: MonitoringClientOptionalParams + ) { + if (credentials === undefined) { + throw new Error("'credentials' cannot be null"); + } + if (endpoint === undefined) { + throw new Error("'endpoint' cannot be null"); + } + + // Initializing default values for options + if (!options) { + options = {}; + } + + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + + if (!options.credentialScopes) { + options.credentialScopes = ["https://dev.azuresynapse.net/.default"]; + } + + super(credentials, options); + + this.requestContentType = "application/json; charset=utf-8"; + + this.baseUri = options.endpoint || "{endpoint}"; + + // Parameter assignments + this.endpoint = endpoint; + + // Assigning values to Constant parameters + this.apiVersion = options.apiVersion || "2019-11-01-preview"; + } +} diff --git a/sdk/synapse/synapse-monitoring/src/operations/index.ts b/sdk/synapse/synapse-monitoring/src/operations/index.ts new file mode 100644 index 000000000000..a8ff2c177aec --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/operations/index.ts @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export * from "./monitoring"; diff --git a/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts new file mode 100644 index 000000000000..d526105f0c45 --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/operations/monitoring.ts @@ -0,0 +1,121 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CanonicalCode } from "@opentelemetry/api"; +import { createSpan } from "../tracing"; +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +import { MonitoringClient } from "../monitoringClient"; +import { + MonitoringGetSparkJobListOptionalParams, + MonitoringGetSparkJobListResponse, + MonitoringGetSqlJobQueryStringOptionalParams, + MonitoringGetSqlJobQueryStringResponse +} from "../models"; + +/** + * Class representing a Monitoring. + */ +export class Monitoring { + private readonly client: MonitoringClient; + + /** + * Initialize a new instance of the class Monitoring class. + * @param client Reference to the service client + */ + constructor(client: MonitoringClient) { + this.client = client; + } + + /** + * Get list of spark applications for the workspace. + * @param options The options parameters. 
 + */ + async getSparkJobList( + options?: MonitoringGetSparkJobListOptionalParams + ): Promise<MonitoringGetSparkJobListResponse> { + const { span, updatedOptions } = createSpan( + "MonitoringClient-getSparkJobList", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSparkJobListOperationSpec + ); + return result as MonitoringGetSparkJobListResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } + + /** + * Get SQL OD/DW Query for the workspace. + * @param options The options parameters. + */ + async getSqlJobQueryString( + options?: MonitoringGetSqlJobQueryStringOptionalParams + ): Promise<MonitoringGetSqlJobQueryStringResponse> { + const { span, updatedOptions } = createSpan( + "MonitoringClient-getSqlJobQueryString", + coreHttp.operationOptionsToRequestOptionsBase(options || {}) + ); + const operationArguments: coreHttp.OperationArguments = { + options: updatedOptions + }; + try { + const result = await this.client.sendOperationRequest( + operationArguments, + getSqlJobQueryStringOperationSpec + ); + return result as MonitoringGetSqlJobQueryStringResponse; + } catch (error) { + span.setStatus({ + code: CanonicalCode.UNKNOWN, + message: error.message + }); + throw error; + } finally { + span.end(); + } + } +} +// Operation Specifications + +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); + +const getSparkJobListOperationSpec: coreHttp.OperationSpec = { + path: "/monitoring/workloadTypes/spark/Applications", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkJobListViewResponse + } + }, + queryParameters: [Parameters.apiVersion], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.xMsClientRequestId], + serializer +}; +const getSqlJobQueryStringOperationSpec: coreHttp.OperationSpec = { + path: "/monitoring/workloadTypes/sql/querystring", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SqlQueryStringDataModel + } + }, + queryParameters: [Parameters.apiVersion, Parameters.filter, Parameters.orderby, Parameters.skip], + urlParameters: [Parameters.endpoint], + headerParameters: [Parameters.accept, Parameters.xMsClientRequestId], + serializer +}; diff --git a/sdk/synapse/synapse-monitoring/src/tracing.ts b/sdk/synapse/synapse-monitoring/src/tracing.ts new file mode 100644 index 000000000000..bf65d560face --- /dev/null +++ b/sdk/synapse/synapse-monitoring/src/tracing.ts @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
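+// Shared tracing helper: createSpanFunction from @azure/core-http builds the createSpan(name, options) helper that the generated operations use to open a span tagged with the Azure.Synapse.Monitoring namespace.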
+import { createSpanFunction } from "@azure/core-http"; + +export const createSpan = createSpanFunction({ + namespace: "Azure.Synapse.Monitoring", + packagePrefix: "Microsoft.Synapse" +}); diff --git a/sdk/synapse/synapse-monitoring/tsconfig.json b/sdk/synapse/synapse-monitoring/tsconfig.json new file mode 100644 index 000000000000..d43efedfc9bc --- /dev/null +++ b/sdk/synapse/synapse-monitoring/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "es6", + "moduleResolution": "node", + "strict": true, + "target": "es5", + "sourceMap": true, + "declarationMap": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "preserveConstEnums": true, + "lib": ["es6", "dom"], + "declaration": true, + "outDir": "./dist-esm", + "importHelpers": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +} diff --git a/sdk/synapse/synapse-spark/CHANGELOG.md b/sdk/synapse/synapse-spark/CHANGELOG.md new file mode 100644 index 000000000000..106cbca530aa --- /dev/null +++ b/sdk/synapse/synapse-spark/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 1.0.0-beta.1 (2020-12-09) + +- Initial release diff --git a/sdk/synapse/synapse-spark/README.md b/sdk/synapse/synapse-spark/README.md new file mode 100644 index 000000000000..79816be8b0ef --- /dev/null +++ b/sdk/synapse/synapse-spark/README.md @@ -0,0 +1,50 @@ +## Azure Synapse Spark client library for JavaScript + +This package contains an isomorphic SDK for Spark. + +## Getting started + +### Install the package + +```bash +npm install @azure/synapse-spark +``` + +### Currently supported environments + +- Node.js version 8.x.x or higher +- Browser JavaScript + +## Key concepts + +### How to use + +## Examples + +In the future, we will share samples here; until then, a minimal sketch appears at the end of this README. + +## Related projects + +- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js) + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcdn%2Farm-cdn%2FREADME.png) + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +import { setLogLevel } from "@azure/logger"; + +setLogLevel("info"); +``` + +## Next steps + +In the future, you'll find additional code samples here. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/master/CONTRIBUTING.md) to learn more about how to build and test the code. 
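+Until official samples are published, here is the minimal, unverified sketch referenced above under Examples; the endpoint and Spark pool name are placeholders for your own workspace:
+
+```ts
+import { SparkClient } from "@azure/synapse-spark";
+import { DefaultAzureCredential } from "@azure/identity";
+
+export async function main(): Promise<void> {
+  const credential = new DefaultAzureCredential();
+
+  // The third constructor argument selects the Spark pool to target.
+  const client = new SparkClient(
+    credential,
+    "https://mysynapse.dev.azuresynapse.net",
+    "mysparkpool"
+  );
+
+  // List the batch jobs currently known to the pool.
+  const batches = await client.sparkBatch.getSparkBatchJobs();
+  console.log("total batch jobs:", batches.total);
+}
+```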
+ +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fkeyvault%2Fkeyvault-keys%2FREADME.png) diff --git a/sdk/synapse/synapse-spark/api-extractor.json b/sdk/synapse/synapse-spark/api-extractor.json new file mode 100644 index 000000000000..b49278775b4e --- /dev/null +++ b/sdk/synapse/synapse-spark/api-extractor.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json", + "mainEntryPointFilePath": "./dist-esm/index.d.ts", + "docModel": { "enabled": true }, + "apiReport": { "enabled": true, "reportFolder": "./review" }, + "dtsRollup": { + "enabled": true, + "untrimmedFilePath": "", + "publicTrimmedFilePath": "./types/synapse-spark.d.ts" + }, + "messages": { + "tsdocMessageReporting": { "default": { "logLevel": "none" } }, + "extractorMessageReporting": { + "ae-missing-release-tag": { "logLevel": "none" }, + "ae-unresolved-link": { "logLevel": "none" } + } + } +} diff --git a/sdk/synapse/synapse-spark/package.json b/sdk/synapse/synapse-spark/package.json new file mode 100644 index 000000000000..5c130161725c --- /dev/null +++ b/sdk/synapse/synapse-spark/package.json @@ -0,0 +1,74 @@ +{ + "name": "@azure/synapse-spark", + "author": "Microsoft Corporation", + "description": "A generated SDK for SparkClient.", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/master/sdk/synapse/synapse-spark/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "sdk-type": "client", + "version": "1.0.0-beta.1", + "dependencies": { + "@azure/core-http": "^1.2.0", + "@opentelemetry/api": "^0.10.2", + "tslib": "^2.0.0" + }, + "keywords": [ + "node", + "azure", + "typescript", + "browser", + "cloud", + "isomorphic" + ], + "license": "MIT", + "engine": { + "node": ">=8.0.0" + }, + "engines": { + "node": ">=8.0.0" + }, + "main": "./dist/index.js", + "module": "./dist-esm/src/index.js", + "types": "./types/synapse-spark.d.ts", + "devDependencies": { + "typescript": "~3.9.3", + "eslint": "^6.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "rollup": "^1.16.3", + "rollup-plugin-node-resolve": "^3.4.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "@rollup/plugin-commonjs": "11.0.2", + "uglify-js": "^3.4.9", + "@microsoft/api-extractor": "7.7.11" + }, + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "dist/**/*.d.ts", + "dist/**/*.d.ts.map", + "esm/**/*.js", + "esm/**/*.js.map", + "esm/**/*.d.ts", + "esm/**/*.d.ts.map", + "types/synapse-spark.d.ts", + "README.md", + "rollup.config.js", + "tsconfig.json" + ], + "scripts": { + "build": "tsc && rollup -c rollup.config.js && npm run minify && npm run extract-api", + "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js", + "pack": "npm pack 2>&1", + "build:test": "echo skipped", + "test": "echo skipped", + "lint": "echo skipped", + "unit-test:browser": "echo skipped", + "unit-test:node": "echo skipped", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "extract-api": "api-extractor run --local" + }, + "sideEffects": false, + "autoPublish": true +} diff --git a/sdk/synapse/synapse-spark/review/synapse-spark.api.md b/sdk/synapse/synapse-spark/review/synapse-spark.api.md new file mode 100644 index 000000000000..ce5717326e11 --- /dev/null +++ b/sdk/synapse/synapse-spark/review/synapse-spark.api.md @@ -0,0 +1,601 @@ +## API Report File for 
"@azure/synapse-spark" + +> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/). + +```ts + +import * as coreHttp from '@azure/core-http'; + +// @public +export const enum KnownPluginCurrentState { + // (undocumented) + Cleanup = "Cleanup", + // (undocumented) + Ended = "Ended", + // (undocumented) + Monitoring = "Monitoring", + // (undocumented) + Preparation = "Preparation", + // (undocumented) + Queued = "Queued", + // (undocumented) + ResourceAcquisition = "ResourceAcquisition", + // (undocumented) + Submission = "Submission" +} + +// @public +export const enum KnownSchedulerCurrentState { + // (undocumented) + Ended = "Ended", + // (undocumented) + Queued = "Queued", + // (undocumented) + Scheduled = "Scheduled" +} + +// @public +export const enum KnownSparkBatchJobResultType { + // (undocumented) + Cancelled = "Cancelled", + // (undocumented) + Failed = "Failed", + // (undocumented) + Succeeded = "Succeeded", + // (undocumented) + Uncertain = "Uncertain" +} + +// @public +export const enum KnownSparkErrorSource { + // (undocumented) + Dependency = "Dependency", + // (undocumented) + System = "System", + // (undocumented) + Unknown = "Unknown", + // (undocumented) + User = "User" +} + +// @public +export const enum KnownSparkJobType { + // (undocumented) + SparkBatch = "SparkBatch", + // (undocumented) + SparkSession = "SparkSession" +} + +// @public +export const enum KnownSparkSessionResultType { + // (undocumented) + Cancelled = "Cancelled", + // (undocumented) + Failed = "Failed", + // (undocumented) + Succeeded = "Succeeded", + // (undocumented) + Uncertain = "Uncertain" +} + +// @public +export const enum KnownSparkStatementLanguageType { + // (undocumented) + Dotnetspark = "dotnetspark", + // (undocumented) + Pyspark = "pyspark", + // (undocumented) + Spark = "spark", + // (undocumented) + Sql = "sql" +} + +// @public +export type PluginCurrentState = string; + +// @public +export type SchedulerCurrentState = string; + +// @public +export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; +} + +// @public +export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkBatchJob; + }; +}; + +// @public +export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; +} + +// @public +export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkBatchJob; + }; +}; + +// @public +export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; + fromParam?: number; + size?: number; +} + +// @public +export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkBatchJobCollection; + }; +}; + +// @public (undocumented) +export interface SparkBatchJob { + appId?: string | null; + appInfo?: { + [propertyName: string]: string; + } | null; + artifactId?: string; + errors?: SparkServiceError[]; + id: number; + jobType?: SparkJobType; + // (undocumented) + livyInfo?: SparkBatchJobState; + logLines?: string[] | null; + name?: string; + plugin?: SparkServicePlugin; + result?: SparkBatchJobResultType; + scheduler?: SparkScheduler; + sparkPoolName?: string; + state?: string; + submitterId?: string; + submitterName?: string; + 
 tags?: { + [propertyName: string]: string; + }; + workspaceName?: string; +} + +// @public +export interface SparkBatchJobCollection { + from: number; + sessions?: SparkBatchJob[]; + total: number; +} + +// @public (undocumented) +export interface SparkBatchJobOptions { + // (undocumented) + archives?: string[]; + // (undocumented) + arguments?: string[]; + // (undocumented) + artifactId?: string; + // (undocumented) + className?: string; + configuration?: { + [propertyName: string]: string; + }; + // (undocumented) + driverCores?: number; + // (undocumented) + driverMemory?: string; + // (undocumented) + executorCores?: number; + // (undocumented) + executorCount?: number; + // (undocumented) + executorMemory?: string; + // (undocumented) + file: string; + // (undocumented) + files?: string[]; + // (undocumented) + jars?: string[]; + // (undocumented) + name: string; + // (undocumented) + pythonFiles?: string[]; + tags?: { + [propertyName: string]: string; + }; +} + +// @public +export type SparkBatchJobResultType = string; + +// @public (undocumented) +export interface SparkBatchJobState { + currentState?: string; + deadAt?: Date | null; + // (undocumented) + jobCreationRequest?: SparkRequest; + notStartedAt?: Date | null; + recoveringAt?: Date | null; + runningAt?: Date | null; + startingAt?: Date | null; + successAt?: Date | null; + terminatedAt?: Date | null; +} + +// @public +export class SparkBatchOperation { + constructor(client: SparkClient); + cancelSparkBatchJob(batchId: number, options?: coreHttp.OperationOptions): Promise<coreHttp.RestResponse>; + createSparkBatchJob(sparkBatchJobOptions: SparkBatchJobOptions, options?: SparkBatchCreateSparkBatchJobOptionalParams): Promise<SparkBatchCreateSparkBatchJobResponse>; + getSparkBatchJob(batchId: number, options?: SparkBatchGetSparkBatchJobOptionalParams): Promise<SparkBatchGetSparkBatchJobResponse>; + getSparkBatchJobs(options?: SparkBatchGetSparkBatchJobsOptionalParams): Promise<SparkBatchGetSparkBatchJobsResponse>; +} + +// @public (undocumented) +export class SparkClient extends SparkClientContext { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams); + // (undocumented) + sparkBatch: SparkBatchOperation; + // (undocumented) + sparkSession: SparkSessionOperation; +} + +// @public (undocumented) +export class SparkClientContext extends coreHttp.ServiceClient { + constructor(credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, endpoint: string, sparkPoolName: string, options?: SparkClientOptionalParams); + // (undocumented) + endpoint: string; + // (undocumented) + livyApiVersion: string; + // (undocumented) + sparkPoolName: string; +} + +// @public +export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { + endpoint?: string; + livyApiVersion?: string; +} + +// @public +export type SparkErrorSource = string; + +// @public +export type SparkJobType = string; + +// @public (undocumented) +export interface SparkRequest { + // (undocumented) + archives?: string[]; + // (undocumented) + arguments?: string[]; + // (undocumented) + className?: string; + configuration?: { + [propertyName: string]: string; + }; + // (undocumented) + driverCores?: number; + // (undocumented) + driverMemory?: string; + // (undocumented) + executorCores?: number; + // (undocumented) + executorCount?: number; + // (undocumented) + executorMemory?: string; + // (undocumented) + file?: string; + // (undocumented) + files?: string[]; + // (undocumented) + jars?: string[]; + // (undocumented) + name?: string; + // (undocumented) + 
pythonFiles?: string[]; +} + +// @public (undocumented) +export interface SparkScheduler { + // (undocumented) + cancellationRequestedAt?: Date; + // (undocumented) + currentState?: SchedulerCurrentState; + // (undocumented) + endedAt?: Date | null; + // (undocumented) + scheduledAt?: Date | null; + // (undocumented) + submittedAt?: Date | null; +} + +// @public (undocumented) +export interface SparkServiceError { + // (undocumented) + errorCode?: string; + // (undocumented) + message?: string; + // (undocumented) + source?: SparkErrorSource; +} + +// @public (undocumented) +export interface SparkServicePlugin { + // (undocumented) + cleanupStartedAt?: Date | null; + // (undocumented) + currentState?: PluginCurrentState; + // (undocumented) + monitoringStartedAt?: Date | null; + // (undocumented) + preparationStartedAt?: Date | null; + // (undocumented) + resourceAcquisitionStartedAt?: Date | null; + // (undocumented) + submissionStartedAt?: Date | null; +} + +// @public (undocumented) +export interface SparkSession { + // (undocumented) + appId?: string | null; + appInfo?: { + [propertyName: string]: string; + } | null; + // (undocumented) + artifactId?: string; + errors?: SparkServiceError[]; + // (undocumented) + id: number; + jobType?: SparkJobType; + // (undocumented) + livyInfo?: SparkSessionState; + // (undocumented) + logLines?: string[] | null; + // (undocumented) + name?: string; + // (undocumented) + plugin?: SparkServicePlugin; + // (undocumented) + result?: SparkSessionResultType; + // (undocumented) + scheduler?: SparkScheduler; + // (undocumented) + sparkPoolName?: string; + // (undocumented) + state?: string; + // (undocumented) + submitterId?: string; + // (undocumented) + submitterName?: string; + tags?: { + [propertyName: string]: string; + }; + // (undocumented) + workspaceName?: string; +} + +// @public +export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellationResult & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkStatementCancellationResult; + }; +}; + +// @public (undocumented) +export interface SparkSessionCollection { + // (undocumented) + from: number; + // (undocumented) + sessions?: SparkSession[]; + // (undocumented) + total: number; +} + +// @public +export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; +} + +// @public +export type SparkSessionCreateSparkSessionResponse = SparkSession & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkSession; + }; +}; + +// @public +export type SparkSessionCreateSparkStatementResponse = SparkStatement & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkStatement; + }; +}; + +// @public +export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; +} + +// @public +export type SparkSessionGetSparkSessionResponse = SparkSession & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkSession; + }; +}; + +// @public +export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { + detailed?: boolean; + fromParam?: number; + size?: number; +} + +// @public +export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkSessionCollection; + }; +}; + +// @public +export type SparkSessionGetSparkStatementResponse = SparkStatement & { + 
 _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkStatement; + }; +}; + +// @public +export type SparkSessionGetSparkStatementsResponse = SparkStatementCollection & { + _response: coreHttp.HttpResponse & { + bodyAsText: string; + parsedBody: SparkStatementCollection; + }; +}; + +// @public +export class SparkSessionOperation { + constructor(client: SparkClient); + cancelSparkSession(sessionId: number, options?: coreHttp.OperationOptions): Promise<coreHttp.RestResponse>; + cancelSparkStatement(sessionId: number, statementId: number, options?: coreHttp.OperationOptions): Promise<SparkSessionCancelSparkStatementResponse>; + createSparkSession(sparkSessionOptions: SparkSessionOptions, options?: SparkSessionCreateSparkSessionOptionalParams): Promise<SparkSessionCreateSparkSessionResponse>; + createSparkStatement(sessionId: number, sparkStatementOptions: SparkStatementOptions, options?: coreHttp.OperationOptions): Promise<SparkSessionCreateSparkStatementResponse>; + getSparkSession(sessionId: number, options?: SparkSessionGetSparkSessionOptionalParams): Promise<SparkSessionGetSparkSessionResponse>; + getSparkSessions(options?: SparkSessionGetSparkSessionsOptionalParams): Promise<SparkSessionGetSparkSessionsResponse>; + getSparkStatement(sessionId: number, statementId: number, options?: coreHttp.OperationOptions): Promise<SparkSessionGetSparkStatementResponse>; + getSparkStatements(sessionId: number, options?: coreHttp.OperationOptions): Promise<SparkSessionGetSparkStatementsResponse>; + resetSparkSessionTimeout(sessionId: number, options?: coreHttp.OperationOptions): Promise<coreHttp.RestResponse>; +} + +// @public (undocumented) +export interface SparkSessionOptions { + // (undocumented) + archives?: string[]; + // (undocumented) + arguments?: string[]; + // (undocumented) + artifactId?: string; + // (undocumented) + className?: string; + configuration?: { + [propertyName: string]: string; + }; + // (undocumented) + driverCores?: number; + // (undocumented) + driverMemory?: string; + // (undocumented) + executorCores?: number; + // (undocumented) + executorCount?: number; + // (undocumented) + executorMemory?: string; + // (undocumented) + file?: string; + // (undocumented) + files?: string[]; + // (undocumented) + jars?: string[]; + // (undocumented) + name: string; + // (undocumented) + pythonFiles?: string[]; + tags?: { + [propertyName: string]: string; + }; +} + +// @public +export type SparkSessionResultType = string; + +// @public (undocumented) +export interface SparkSessionState { + // (undocumented) + busyAt?: Date | null; + // (undocumented) + currentState?: string; + // (undocumented) + deadAt?: Date | null; + // (undocumented) + errorAt?: Date | null; + // (undocumented) + idleAt?: Date | null; + // (undocumented) + jobCreationRequest?: SparkRequest; + // (undocumented) + notStartedAt?: Date | null; + // (undocumented) + recoveringAt?: Date | null; + // (undocumented) + shuttingDownAt?: Date | null; + // (undocumented) + startingAt?: Date | null; + terminatedAt?: Date | null; +} + +// @public (undocumented) +export interface SparkStatement { + // (undocumented) + code?: string; + // (undocumented) + id: number; + // (undocumented) + output?: SparkStatementOutput; + // (undocumented) + state?: string; +} + +// @public (undocumented) +export interface SparkStatementCancellationResult { + // (undocumented) + msg?: string; +} + +// @public (undocumented) +export interface SparkStatementCollection { + // (undocumented) + statements?: SparkStatement[]; + // (undocumented) + total: number; +} + +// @public +export type SparkStatementLanguageType = string; + +// @public (undocumented) +export interface SparkStatementOptions { + // (undocumented) + code?: string; + // (undocumented) + kind?: SparkStatementLanguageType; +} + +// @public (undocumented) +export interface 
SparkStatementOutput { + data?: any; + // (undocumented) + errorName?: string; + // (undocumented) + errorValue?: string; + // (undocumented) + executionCount: number; + // (undocumented) + status?: string; + // (undocumented) + traceback?: string[]; +} + + +// (No @packageDocumentation comment for this package) + +``` diff --git a/sdk/synapse/synapse-spark/rollup.config.js b/sdk/synapse/synapse-spark/rollup.config.js new file mode 100644 index 000000000000..3510554961f9 --- /dev/null +++ b/sdk/synapse/synapse-spark/rollup.config.js @@ -0,0 +1,41 @@ +import rollup from "rollup"; +import nodeResolve from "rollup-plugin-node-resolve"; +import sourcemaps from "rollup-plugin-sourcemaps"; +import cjs from "@rollup/plugin-commonjs"; + +/** + * @type {rollup.RollupFileOptions} + */ +const config = { + input: "./dist-esm/sparkClient.js", + external: ["@azure/core-http", "@azure/core-arm"], + output: { + file: "./dist/index.js", + format: "cjs", + name: "Azure.SynapseSpark", + sourcemap: true, + globals: { + "@azure/core-http": "coreHttp", + "@azure/core-arm": "coreArm" + }, + banner: `/* + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ ` + }, + plugins: [ + nodeResolve({ module: true }), + sourcemaps(), + cjs({ + namedExports: { + assert: ["ok", "deepEqual", "equal", "fail", "deepStrictEqual", "strictEqual"], + "@opentelemetry/api": ["CanonicalCode", "SpanKind", "TraceFlags"] + } + }) + ] +}; + +export default config; diff --git a/sdk/synapse/synapse-spark/src/index.ts b/sdk/synapse/synapse-spark/src/index.ts new file mode 100644 index 000000000000..5cc2b60083cf --- /dev/null +++ b/sdk/synapse/synapse-spark/src/index.ts @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export * from "./models"; +export { SparkClient } from "./sparkClient"; +export { SparkClientContext } from "./sparkClientContext"; +export { + SparkBatch as SparkBatchOperation, + SparkSession as SparkSessionOperation +} from "./operations"; diff --git a/sdk/synapse/synapse-spark/src/models/index.ts b/sdk/synapse/synapse-spark/src/models/index.ts new file mode 100644 index 000000000000..3bc98153fa09 --- /dev/null +++ b/sdk/synapse/synapse-spark/src/models/index.ts @@ -0,0 +1,757 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; + +/** + * Response for batch list operation. + */ +export interface SparkBatchJobCollection { + /** + * The start index of fetched sessions. + */ + from: number; + /** + * Number of sessions fetched. + */ + total: number; + /** + * Batch list + */ + sessions?: SparkBatchJob[]; +} + +export interface SparkBatchJob { + livyInfo?: SparkBatchJobState; + /** + * The batch name. + */ + name?: string; + /** + * The workspace name. + */ + workspaceName?: string; + /** + * The Spark pool name. + */ + sparkPoolName?: string; + /** + * The submitter name. + */ + submitterName?: string; + /** + * The submitter identifier. + */ + submitterId?: string; + /** + * The artifact identifier. + */ + artifactId?: string; + /** + * The job type. + */ + jobType?: SparkJobType; + /** + * The Spark batch job result. + */ + result?: SparkBatchJobResultType; + /** + * The scheduler information. 
 + */ + scheduler?: SparkScheduler; + /** + * The plugin information. + */ + plugin?: SparkServicePlugin; + /** + * The error information. + */ + errors?: SparkServiceError[]; + /** + * The tags. + */ + tags?: { [propertyName: string]: string }; + /** + * The session Id. + */ + id: number; + /** + * The application id of this session. + */ + appId?: string | null; + /** + * The detailed application info. + */ + appInfo?: { [propertyName: string]: string } | null; + /** + * The batch state. + */ + state?: string; + /** + * The log lines. + */ + logLines?: string[] | null; +} + +export interface SparkBatchJobState { + /** + * the time at which "not_started" livy state was first seen. + */ + notStartedAt?: Date | null; + /** + * the time at which "starting" livy state was first seen. + */ + startingAt?: Date | null; + /** + * the time at which "running" livy state was first seen. + */ + runningAt?: Date | null; + /** + * the time at which "dead" livy state was first seen. + */ + deadAt?: Date | null; + /** + * the time at which "success" livy state was first seen. + */ + successAt?: Date | null; + /** + * the time at which "killed" livy state was first seen. + */ + terminatedAt?: Date | null; + /** + * the time at which "recovering" livy state was first seen. + */ + recoveringAt?: Date | null; + /** + * the Spark job state. + */ + currentState?: string; + jobCreationRequest?: SparkRequest; +} + +export interface SparkRequest { + name?: string; + file?: string; + className?: string; + arguments?: string[]; + jars?: string[]; + pythonFiles?: string[]; + files?: string[]; + archives?: string[]; + /** + * Dictionary of + */ + configuration?: { [propertyName: string]: string }; + driverMemory?: string; + driverCores?: number; + executorMemory?: string; + executorCores?: number; + executorCount?: number; +} + +export interface SparkScheduler { + submittedAt?: Date | null; + scheduledAt?: Date | null; + endedAt?: Date | null; + cancellationRequestedAt?: Date; + currentState?: SchedulerCurrentState; +} + +export interface SparkServicePlugin { + preparationStartedAt?: Date | null; + resourceAcquisitionStartedAt?: Date | null; + submissionStartedAt?: Date | null; + monitoringStartedAt?: Date | null; + cleanupStartedAt?: Date | null; + currentState?: PluginCurrentState; +} + +export interface SparkServiceError { + message?: string; + errorCode?: string; + source?: SparkErrorSource; +} + +export interface SparkBatchJobOptions { + /** + * Dictionary of + */ + tags?: { [propertyName: string]: string }; + artifactId?: string; + name: string; + file: string; + className?: string; + arguments?: string[]; + jars?: string[]; + pythonFiles?: string[]; + files?: string[]; + archives?: string[]; + /** + * Dictionary of + */ + configuration?: { [propertyName: string]: string }; + driverMemory?: string; + driverCores?: number; + executorMemory?: string; + executorCores?: number; + executorCount?: number; +} + +export interface SparkSessionCollection { + from: number; + total: number; + sessions?: SparkSession[]; +} + +export interface SparkSession { + livyInfo?: SparkSessionState; + name?: string; + workspaceName?: string; + sparkPoolName?: string; + submitterName?: string; + submitterId?: string; + artifactId?: string; + /** + * The job type. + */ + jobType?: SparkJobType; + result?: SparkSessionResultType; + scheduler?: SparkScheduler; + plugin?: SparkServicePlugin; + /** + * The error information. 
 + */ + errors?: SparkServiceError[]; + /** + * Dictionary of + */ + tags?: { [propertyName: string]: string }; + id: number; + appId?: string | null; + /** + * Dictionary of + */ + appInfo?: { [propertyName: string]: string } | null; + state?: string; + logLines?: string[] | null; +} + +export interface SparkSessionState { + notStartedAt?: Date | null; + startingAt?: Date | null; + idleAt?: Date | null; + deadAt?: Date | null; + shuttingDownAt?: Date | null; + /** + * the time at which "killed" livy state was first seen. + */ + terminatedAt?: Date | null; + recoveringAt?: Date | null; + busyAt?: Date | null; + errorAt?: Date | null; + currentState?: string; + jobCreationRequest?: SparkRequest; +} + +export interface SparkSessionOptions { + /** + * Dictionary of + */ + tags?: { [propertyName: string]: string }; + artifactId?: string; + name: string; + file?: string; + className?: string; + arguments?: string[]; + jars?: string[]; + pythonFiles?: string[]; + files?: string[]; + archives?: string[]; + /** + * Dictionary of + */ + configuration?: { [propertyName: string]: string }; + driverMemory?: string; + driverCores?: number; + executorMemory?: string; + executorCores?: number; + executorCount?: number; +} + +export interface SparkStatementCollection { + total: number; + statements?: SparkStatement[]; +} + +export interface SparkStatement { + id: number; + code?: string; + state?: string; + output?: SparkStatementOutput; +} + +export interface SparkStatementOutput { + status?: string; + executionCount: number; + /** + * Any object + */ + data?: any; + errorName?: string; + errorValue?: string; + traceback?: string[]; +} + +export interface SparkStatementOptions { + code?: string; + kind?: SparkStatementLanguageType; +} + +export interface SparkStatementCancellationResult { + msg?: string; +} + +/** + * Known values of {@link SparkJobType} that the service accepts. + */ +export const enum KnownSparkJobType { + SparkBatch = "SparkBatch", + SparkSession = "SparkSession" +} + +/** + * Defines values for SparkJobType. \ + * {@link KnownSparkJobType} can be used interchangeably with SparkJobType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **SparkBatch** \ + * **SparkSession** + */ +export type SparkJobType = string; + +/** + * Known values of {@link SparkBatchJobResultType} that the service accepts. + */ +export const enum KnownSparkBatchJobResultType { + Uncertain = "Uncertain", + Succeeded = "Succeeded", + Failed = "Failed", + Cancelled = "Cancelled" +} + +/** + * Defines values for SparkBatchJobResultType. \ + * {@link KnownSparkBatchJobResultType} can be used interchangeably with SparkBatchJobResultType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **Uncertain** \ + * **Succeeded** \ + * **Failed** \ + * **Cancelled** + */ +export type SparkBatchJobResultType = string; + +/** + * Known values of {@link SchedulerCurrentState} that the service accepts. + */ +export const enum KnownSchedulerCurrentState { + Queued = "Queued", + Scheduled = "Scheduled", + Ended = "Ended" +} + +/** + * Defines values for SchedulerCurrentState. \ + * {@link KnownSchedulerCurrentState} can be used interchangeably with SchedulerCurrentState, + * this enum contains the known values that the service supports.
 + * ### Known values supported by the service + * **Queued** \ + * **Scheduled** \ + * **Ended** + */ +export type SchedulerCurrentState = string; + +/** + * Known values of {@link PluginCurrentState} that the service accepts. + */ +export const enum KnownPluginCurrentState { + Preparation = "Preparation", + ResourceAcquisition = "ResourceAcquisition", + Queued = "Queued", + Submission = "Submission", + Monitoring = "Monitoring", + Cleanup = "Cleanup", + Ended = "Ended" +} + +/** + * Defines values for PluginCurrentState. \ + * {@link KnownPluginCurrentState} can be used interchangeably with PluginCurrentState, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **Preparation** \ + * **ResourceAcquisition** \ + * **Queued** \ + * **Submission** \ + * **Monitoring** \ + * **Cleanup** \ + * **Ended** + */ +export type PluginCurrentState = string; + +/** + * Known values of {@link SparkErrorSource} that the service accepts. + */ +export const enum KnownSparkErrorSource { + System = "System", + User = "User", + Unknown = "Unknown", + Dependency = "Dependency" +} + +/** + * Defines values for SparkErrorSource. \ + * {@link KnownSparkErrorSource} can be used interchangeably with SparkErrorSource, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **System** \ + * **User** \ + * **Unknown** \ + * **Dependency** + */ +export type SparkErrorSource = string; + +/** + * Known values of {@link SparkSessionResultType} that the service accepts. + */ +export const enum KnownSparkSessionResultType { + Uncertain = "Uncertain", + Succeeded = "Succeeded", + Failed = "Failed", + Cancelled = "Cancelled" +} + +/** + * Defines values for SparkSessionResultType. \ + * {@link KnownSparkSessionResultType} can be used interchangeably with SparkSessionResultType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **Uncertain** \ + * **Succeeded** \ + * **Failed** \ + * **Cancelled** + */ +export type SparkSessionResultType = string; + +/** + * Known values of {@link SparkStatementLanguageType} that the service accepts. + */ +export const enum KnownSparkStatementLanguageType { + Spark = "spark", + Pyspark = "pyspark", + Dotnetspark = "dotnetspark", + Sql = "sql" +} + +/** + * Defines values for SparkStatementLanguageType. \ + * {@link KnownSparkStatementLanguageType} can be used interchangeably with SparkStatementLanguageType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **spark** \ + * **pyspark** \ + * **dotnetspark** \ + * **sql** + */ +export type SparkStatementLanguageType = string; + +/** + * Optional parameters. + */ +export interface SparkBatchGetSparkBatchJobsOptionalParams extends coreHttp.OperationOptions { + /** + * Optional param specifying which index the list should begin from. + */ + fromParam?: number; + /** + * Optional param specifying the size of the returned list. + * By default it is 20 and that is the maximum. + */ + size?: number; + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the getSparkBatchJobs operation. + */ +export type SparkBatchGetSparkBatchJobsResponse = SparkBatchJobCollection & { + /** + * The underlying HTTP response. 
+ */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkBatchJobCollection; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkBatchCreateSparkBatchJobOptionalParams extends coreHttp.OperationOptions { + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the createSparkBatchJob operation. + */ +export type SparkBatchCreateSparkBatchJobResponse = SparkBatchJob & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkBatchJob; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkBatchGetSparkBatchJobOptionalParams extends coreHttp.OperationOptions { + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the getSparkBatchJob operation. + */ +export type SparkBatchGetSparkBatchJobResponse = SparkBatchJob & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkBatchJob; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkSessionGetSparkSessionsOptionalParams extends coreHttp.OperationOptions { + /** + * Optional param specifying which index the list should begin from. + */ + fromParam?: number; + /** + * Optional param specifying the size of the returned list. + * By default it is 20 and that is the maximum. + */ + size?: number; + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the getSparkSessions operation. + */ +export type SparkSessionGetSparkSessionsResponse = SparkSessionCollection & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkSessionCollection; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkSessionCreateSparkSessionOptionalParams extends coreHttp.OperationOptions { + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the createSparkSession operation. + */ +export type SparkSessionCreateSparkSessionResponse = SparkSession & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkSession; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkSessionGetSparkSessionOptionalParams extends coreHttp.OperationOptions { + /** + * Optional query param specifying whether detailed response is returned beyond plain livy. + */ + detailed?: boolean; +} + +/** + * Contains response data for the getSparkSession operation. 
+ */ +export type SparkSessionGetSparkSessionResponse = SparkSession & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkSession; + }; +}; + +/** + * Contains response data for the getSparkStatements operation. + */ +export type SparkSessionGetSparkStatementsResponse = SparkStatementCollection & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkStatementCollection; + }; +}; + +/** + * Contains response data for the createSparkStatement operation. + */ +export type SparkSessionCreateSparkStatementResponse = SparkStatement & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkStatement; + }; +}; + +/** + * Contains response data for the getSparkStatement operation. + */ +export type SparkSessionGetSparkStatementResponse = SparkStatement & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkStatement; + }; +}; + +/** + * Contains response data for the cancelSparkStatement operation. + */ +export type SparkSessionCancelSparkStatementResponse = SparkStatementCancellationResult & { + /** + * The underlying HTTP response. + */ + _response: coreHttp.HttpResponse & { + /** + * The response body as text (string format) + */ + bodyAsText: string; + + /** + * The response body as parsed JSON or XML + */ + parsedBody: SparkStatementCancellationResult; + }; +}; + +/** + * Optional parameters. + */ +export interface SparkClientOptionalParams extends coreHttp.ServiceClientOptions { + /** + * Valid api-version for the request. + */ + livyApiVersion?: string; + /** + * Overrides client endpoint. + */ + endpoint?: string; +} diff --git a/sdk/synapse/synapse-spark/src/models/mappers.ts b/sdk/synapse/synapse-spark/src/models/mappers.ts new file mode 100644 index 000000000000..4b82776a4058 --- /dev/null +++ b/sdk/synapse/synapse-spark/src/models/mappers.ts @@ -0,0 +1,1158 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
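+
+// Serialization metadata consumed by the @azure/core-http serializer: each
+// mapper's `className` names the client-side model, and each property's
+// `serializedName` gives the wire name, so renamed fields (for example
+// `arguments` <-> "args", `executorCount` <-> "numExecutors",
+// `terminatedAt` <-> "killedAt") are converted automatically in both
+// directions.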
+import * as coreHttp from "@azure/core-http"; + +export const SparkBatchJobCollection: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJobCollection", + modelProperties: { + from: { + serializedName: "from", + required: true, + type: { + name: "Number" + } + }, + total: { + serializedName: "total", + required: true, + type: { + name: "Number" + } + }, + sessions: { + serializedName: "sessions", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkBatchJob" + } + } + } + } + } + } +}; + +export const SparkBatchJob: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJob", + modelProperties: { + livyInfo: { + serializedName: "livyInfo", + type: { + name: "Composite", + className: "SparkBatchJobState" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + workspaceName: { + serializedName: "workspaceName", + type: { + name: "String" + } + }, + sparkPoolName: { + serializedName: "sparkPoolName", + type: { + name: "String" + } + }, + submitterName: { + serializedName: "submitterName", + type: { + name: "String" + } + }, + submitterId: { + serializedName: "submitterId", + type: { + name: "String" + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + jobType: { + serializedName: "jobType", + type: { + name: "String" + } + }, + result: { + serializedName: "result", + type: { + name: "String" + } + }, + scheduler: { + serializedName: "schedulerInfo", + type: { + name: "Composite", + className: "SparkScheduler" + } + }, + plugin: { + serializedName: "pluginInfo", + type: { + name: "Composite", + className: "SparkServicePlugin" + } + }, + errors: { + serializedName: "errorInfo", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkServiceError" + } + } + } + }, + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + id: { + serializedName: "id", + required: true, + type: { + name: "Number" + } + }, + appId: { + serializedName: "appId", + nullable: true, + type: { + name: "String" + } + }, + appInfo: { + serializedName: "appInfo", + nullable: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + state: { + serializedName: "state", + type: { + name: "String" + } + }, + logLines: { + serializedName: "log", + nullable: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const SparkBatchJobState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJobState", + modelProperties: { + notStartedAt: { + serializedName: "notStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + startingAt: { + serializedName: "startingAt", + nullable: true, + type: { + name: "DateTime" + } + }, + runningAt: { + serializedName: "runningAt", + nullable: true, + type: { + name: "DateTime" + } + }, + deadAt: { + serializedName: "deadAt", + nullable: true, + type: { + name: "DateTime" + } + }, + successAt: { + serializedName: "successAt", + nullable: true, + type: { + name: "DateTime" + } + }, + terminatedAt: { + serializedName: "killedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + recoveringAt: { + serializedName: "recoveringAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + }, + jobCreationRequest: { + 
serializedName: "jobCreationRequest", + type: { + name: "Composite", + className: "SparkRequest" + } + } + } + } +}; + +export const SparkRequest: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkRequest", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, + file: { + serializedName: "file", + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + arguments: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + pythonFiles: { + serializedName: "pyFiles", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + configuration: { + serializedName: "conf", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + driverMemory: { + serializedName: "driverMemory", + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + type: { + name: "Number" + } + }, + executorCount: { + serializedName: "numExecutors", + type: { + name: "Number" + } + } + } + } +}; + +export const SparkScheduler: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkScheduler", + modelProperties: { + submittedAt: { + serializedName: "submittedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + scheduledAt: { + serializedName: "scheduledAt", + nullable: true, + type: { + name: "DateTime" + } + }, + endedAt: { + serializedName: "endedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + cancellationRequestedAt: { + serializedName: "cancellationRequestedAt", + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + } + } + } +}; + +export const SparkServicePlugin: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkServicePlugin", + modelProperties: { + preparationStartedAt: { + serializedName: "preparationStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + resourceAcquisitionStartedAt: { + serializedName: "resourceAcquisitionStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + submissionStartedAt: { + serializedName: "submissionStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + monitoringStartedAt: { + serializedName: "monitoringStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + cleanupStartedAt: { + serializedName: "cleanupStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + } + } + } +}; + +export const SparkServiceError: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkServiceError", + modelProperties: { + message: { + serializedName: "message", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "errorCode", + type: { + name: "String" + } + }, + 
source: { + serializedName: "source", + type: { + name: "String" + } + } + } + } +}; + +export const SparkBatchJobOptions: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkBatchJobOptions", + modelProperties: { + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + file: { + serializedName: "file", + required: true, + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + arguments: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + pythonFiles: { + serializedName: "pyFiles", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + configuration: { + serializedName: "conf", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + driverMemory: { + serializedName: "driverMemory", + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + type: { + name: "Number" + } + }, + executorCount: { + serializedName: "numExecutors", + type: { + name: "Number" + } + } + } + } +}; + +export const SparkSessionCollection: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkSessionCollection", + modelProperties: { + from: { + serializedName: "from", + required: true, + type: { + name: "Number" + } + }, + total: { + serializedName: "total", + required: true, + type: { + name: "Number" + } + }, + sessions: { + serializedName: "sessions", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkSession" + } + } + } + } + } + } +}; + +export const SparkSession: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkSession", + modelProperties: { + livyInfo: { + serializedName: "livyInfo", + type: { + name: "Composite", + className: "SparkSessionState" + } + }, + name: { + serializedName: "name", + type: { + name: "String" + } + }, + workspaceName: { + serializedName: "workspaceName", + type: { + name: "String" + } + }, + sparkPoolName: { + serializedName: "sparkPoolName", + type: { + name: "String" + } + }, + submitterName: { + serializedName: "submitterName", + type: { + name: "String" + } + }, + submitterId: { + serializedName: "submitterId", + type: { + name: "String" + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + jobType: { + serializedName: "jobType", + type: { + name: "String" + } + }, + result: { + serializedName: "result", + type: { + name: "String" + } + }, + scheduler: { + serializedName: "schedulerInfo", + type: { + name: "Composite", + className: "SparkScheduler" + } + }, + plugin: { + serializedName: "pluginInfo", + type: { + name: 
"Composite", + className: "SparkServicePlugin" + } + }, + errors: { + serializedName: "errorInfo", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkServiceError" + } + } + } + }, + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + id: { + serializedName: "id", + required: true, + type: { + name: "Number" + } + }, + appId: { + serializedName: "appId", + nullable: true, + type: { + name: "String" + } + }, + appInfo: { + serializedName: "appInfo", + nullable: true, + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + state: { + serializedName: "state", + type: { + name: "String" + } + }, + logLines: { + serializedName: "log", + nullable: true, + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const SparkSessionState: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkSessionState", + modelProperties: { + notStartedAt: { + serializedName: "notStartedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + startingAt: { + serializedName: "startingAt", + nullable: true, + type: { + name: "DateTime" + } + }, + idleAt: { + serializedName: "idleAt", + nullable: true, + type: { + name: "DateTime" + } + }, + deadAt: { + serializedName: "deadAt", + nullable: true, + type: { + name: "DateTime" + } + }, + shuttingDownAt: { + serializedName: "shuttingDownAt", + nullable: true, + type: { + name: "DateTime" + } + }, + terminatedAt: { + serializedName: "killedAt", + nullable: true, + type: { + name: "DateTime" + } + }, + recoveringAt: { + serializedName: "recoveringAt", + nullable: true, + type: { + name: "DateTime" + } + }, + busyAt: { + serializedName: "busyAt", + nullable: true, + type: { + name: "DateTime" + } + }, + errorAt: { + serializedName: "errorAt", + nullable: true, + type: { + name: "DateTime" + } + }, + currentState: { + serializedName: "currentState", + type: { + name: "String" + } + }, + jobCreationRequest: { + serializedName: "jobCreationRequest", + type: { + name: "Composite", + className: "SparkRequest" + } + } + } + } +}; + +export const SparkSessionOptions: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkSessionOptions", + modelProperties: { + tags: { + serializedName: "tags", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + artifactId: { + serializedName: "artifactId", + type: { + name: "String" + } + }, + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + file: { + serializedName: "file", + type: { + name: "String" + } + }, + className: { + serializedName: "className", + type: { + name: "String" + } + }, + arguments: { + serializedName: "args", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + jars: { + serializedName: "jars", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + pythonFiles: { + serializedName: "pyFiles", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + files: { + serializedName: "files", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + archives: { + serializedName: "archives", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + configuration: { + serializedName: "conf", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + 
driverMemory: { + serializedName: "driverMemory", + type: { + name: "String" + } + }, + driverCores: { + serializedName: "driverCores", + type: { + name: "Number" + } + }, + executorMemory: { + serializedName: "executorMemory", + type: { + name: "String" + } + }, + executorCores: { + serializedName: "executorCores", + type: { + name: "Number" + } + }, + executorCount: { + serializedName: "numExecutors", + type: { + name: "Number" + } + } + } + } +}; + +export const SparkStatementCollection: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkStatementCollection", + modelProperties: { + total: { + serializedName: "total_statements", + required: true, + type: { + name: "Number" + } + }, + statements: { + serializedName: "statements", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SparkStatement" + } + } + } + } + } + } +}; + +export const SparkStatement: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkStatement", + modelProperties: { + id: { + serializedName: "id", + required: true, + type: { + name: "Number" + } + }, + code: { + serializedName: "code", + type: { + name: "String" + } + }, + state: { + serializedName: "state", + type: { + name: "String" + } + }, + output: { + serializedName: "output", + type: { + name: "Composite", + className: "SparkStatementOutput" + } + } + } + } +}; + +export const SparkStatementOutput: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkStatementOutput", + modelProperties: { + status: { + serializedName: "status", + type: { + name: "String" + } + }, + executionCount: { + serializedName: "execution_count", + required: true, + type: { + name: "Number" + } + }, + data: { + serializedName: "data", + type: { + name: "any" + } + }, + errorName: { + serializedName: "ename", + type: { + name: "String" + } + }, + errorValue: { + serializedName: "evalue", + type: { + name: "String" + } + }, + traceback: { + serializedName: "traceback", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const SparkStatementOptions: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkStatementOptions", + modelProperties: { + code: { + serializedName: "code", + type: { + name: "String" + } + }, + kind: { + serializedName: "kind", + type: { + name: "String" + } + } + } + } +}; + +export const SparkStatementCancellationResult: coreHttp.CompositeMapper = { + type: { + name: "Composite", + className: "SparkStatementCancellationResult", + modelProperties: { + msg: { + serializedName: "msg", + type: { + name: "String" + } + } + } + } +}; diff --git a/sdk/synapse/synapse-spark/src/models/parameters.ts b/sdk/synapse/synapse-spark/src/models/parameters.ts new file mode 100644 index 000000000000..eb4a77d11aab --- /dev/null +++ b/sdk/synapse/synapse-spark/src/models/parameters.ts @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
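+
+// Parameter definitions shared by the operation specs in ../operations. Each
+// binds a `parameterPath` (where the value lives on a client call, e.g.
+// ["options", "size"] for an optional argument) to a mapper whose
+// `serializedName` is the name actually sent on the wire; note that the
+// client-side option `fromParam` is transmitted as the `from` query parameter.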
+import { + OperationParameter, + OperationURLParameter, + OperationQueryParameter +} from "@azure/core-http"; +import { + SparkBatchJobOptions as SparkBatchJobOptionsMapper, + SparkSessionOptions as SparkSessionOptionsMapper, + SparkStatementOptions as SparkStatementOptionsMapper +} from "../models/mappers"; + +export const accept: OperationParameter = { + parameterPath: "accept", + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; + +export const endpoint: OperationURLParameter = { + parameterPath: "endpoint", + mapper: { + serializedName: "endpoint", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const livyApiVersion: OperationURLParameter = { + parameterPath: "livyApiVersion", + mapper: { + serializedName: "livyApiVersion", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const sparkPoolName: OperationURLParameter = { + parameterPath: "sparkPoolName", + mapper: { + serializedName: "sparkPoolName", + required: true, + type: { + name: "String" + } + }, + skipEncoding: true +}; + +export const fromParam: OperationQueryParameter = { + parameterPath: ["options", "fromParam"], + mapper: { + serializedName: "from", + type: { + name: "Number" + } + } +}; + +export const size: OperationQueryParameter = { + parameterPath: ["options", "size"], + mapper: { + serializedName: "size", + type: { + name: "Number" + } + } +}; + +export const detailed: OperationQueryParameter = { + parameterPath: ["options", "detailed"], + mapper: { + serializedName: "detailed", + type: { + name: "Boolean" + } + } +}; + +export const contentType: OperationParameter = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/json", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; + +export const sparkBatchJobOptions: OperationParameter = { + parameterPath: "sparkBatchJobOptions", + mapper: SparkBatchJobOptionsMapper +}; + +export const batchId: OperationURLParameter = { + parameterPath: "batchId", + mapper: { + serializedName: "batchId", + required: true, + type: { + name: "Number" + } + } +}; + +export const sparkSessionOptions: OperationParameter = { + parameterPath: "sparkSessionOptions", + mapper: SparkSessionOptionsMapper +}; + +export const sessionId: OperationURLParameter = { + parameterPath: "sessionId", + mapper: { + serializedName: "sessionId", + required: true, + type: { + name: "Number" + } + } +}; + +export const sparkStatementOptions: OperationParameter = { + parameterPath: "sparkStatementOptions", + mapper: SparkStatementOptionsMapper +}; + +export const statementId: OperationURLParameter = { + parameterPath: "statementId", + mapper: { + serializedName: "statementId", + required: true, + type: { + name: "Number" + } + } +}; diff --git a/sdk/synapse/synapse-spark/src/operations/index.ts b/sdk/synapse/synapse-spark/src/operations/index.ts new file mode 100644 index 000000000000..e627dd34f3db --- /dev/null +++ b/sdk/synapse/synapse-spark/src/operations/index.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
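+
+// Barrel file re-exporting the operation group classes (SparkBatch,
+// SparkSession) that SparkClient wires up in its constructor.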
+export * from "./sparkBatch";
+export * from "./sparkSession";
diff --git a/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts
new file mode 100644
index 000000000000..1fbb4dcf44d5
--- /dev/null
+++ b/sdk/synapse/synapse-spark/src/operations/sparkBatch.ts
@@ -0,0 +1,227 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import { CanonicalCode } from "@opentelemetry/api";
+import { createSpan } from "../tracing";
+import * as coreHttp from "@azure/core-http";
+import * as Mappers from "../models/mappers";
+import * as Parameters from "../models/parameters";
+import { SparkClient } from "../sparkClient";
+import {
+  SparkBatchGetSparkBatchJobsOptionalParams,
+  SparkBatchGetSparkBatchJobsResponse,
+  SparkBatchJobOptions,
+  SparkBatchCreateSparkBatchJobOptionalParams,
+  SparkBatchCreateSparkBatchJobResponse,
+  SparkBatchGetSparkBatchJobOptionalParams,
+  SparkBatchGetSparkBatchJobResponse
+} from "../models";
+
+/**
+ * Class representing a SparkBatch.
+ */
+export class SparkBatch {
+  private readonly client: SparkClient;
+
+  /**
+   * Initializes a new instance of the SparkBatch class.
+   * @param client Reference to the service client
+   */
+  constructor(client: SparkClient) {
+    this.client = client;
+  }
+
+  /**
+   * List all spark batch jobs which are running under a particular spark pool.
+   * @param options The options parameters.
+   */
+  async getSparkBatchJobs(
+    options?: SparkBatchGetSparkBatchJobsOptionalParams
+  ): Promise<SparkBatchGetSparkBatchJobsResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-getSparkBatchJobs",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        getSparkBatchJobsOperationSpec
+      );
+      return result as SparkBatchGetSparkBatchJobsResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Create new spark batch job.
+   * @param sparkBatchJobOptions Livy-compatible batch job request payload.
+   * @param options The options parameters.
+   */
+  async createSparkBatchJob(
+    sparkBatchJobOptions: SparkBatchJobOptions,
+    options?: SparkBatchCreateSparkBatchJobOptionalParams
+  ): Promise<SparkBatchCreateSparkBatchJobResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-createSparkBatchJob",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sparkBatchJobOptions,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        createSparkBatchJobOperationSpec
+      );
+      return result as SparkBatchCreateSparkBatchJobResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Gets a single spark batch job.
+   * @param batchId Identifier for the batch job.
+   * @param options The options parameters.
+   */
+  async getSparkBatchJob(
+    batchId: number,
+    options?: SparkBatchGetSparkBatchJobOptionalParams
+  ): Promise<SparkBatchGetSparkBatchJobResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-getSparkBatchJob",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      batchId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        getSparkBatchJobOperationSpec
+      );
+      return result as SparkBatchGetSparkBatchJobResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Cancels a running spark batch job.
+   * @param batchId Identifier for the batch job.
+   * @param options The options parameters.
+   */
+  async cancelSparkBatchJob(
+    batchId: number,
+    options?: coreHttp.OperationOptions
+  ): Promise<coreHttp.RestResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-cancelSparkBatchJob",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      batchId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        cancelSparkBatchJobOperationSpec
+      );
+      return result as coreHttp.RestResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+}
+// Operation Specifications
+
+const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
+
+const getSparkBatchJobsOperationSpec: coreHttp.OperationSpec = {
+  path: "/batches",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SparkBatchJobCollection
+    }
+  },
+  queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed],
+  urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const createSparkBatchJobOperationSpec: coreHttp.OperationSpec = {
+  path: "/batches",
+  httpMethod: "POST",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SparkBatchJob
+    }
+  },
+  requestBody: Parameters.sparkBatchJobOptions,
+  queryParameters: [Parameters.detailed],
+  urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName],
+  headerParameters: [Parameters.accept, Parameters.contentType],
+  mediaType: "json",
+  serializer
+};
+const getSparkBatchJobOperationSpec: coreHttp.OperationSpec = {
+  path: "/batches/{batchId}",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SparkBatchJob
+    }
+  },
+  queryParameters: [Parameters.detailed],
+  urlParameters: [
+    Parameters.endpoint,
+    Parameters.livyApiVersion,
+    Parameters.sparkPoolName,
+    Parameters.batchId
+  ],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const cancelSparkBatchJobOperationSpec: coreHttp.OperationSpec = {
+  path: "/batches/{batchId}",
+  httpMethod: "DELETE",
+  responses: { 200: {} },
+  urlParameters: [
+    Parameters.endpoint,
+    Parameters.livyApiVersion,
+    Parameters.sparkPoolName,
+    Parameters.batchId
+  ],
+  serializer
+};
diff --git a/sdk/synapse/synapse-spark/src/operations/sparkSession.ts b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts
new file mode 100644
index 000000000000..bc78e424cf71
--- /dev/null
+++ b/sdk/synapse/synapse-spark/src/operations/sparkSession.ts
@@ -0,0 +1,495 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import { CanonicalCode } from "@opentelemetry/api";
+import { createSpan } from "../tracing";
+import * as coreHttp from "@azure/core-http";
+import * as Mappers from "../models/mappers";
+import * as Parameters from "../models/parameters";
+import { SparkClient } from "../sparkClient";
+import {
+  SparkSessionGetSparkSessionsOptionalParams,
+  SparkSessionGetSparkSessionsResponse,
+  SparkSessionOptions,
+  SparkSessionCreateSparkSessionOptionalParams,
+  SparkSessionCreateSparkSessionResponse,
+  SparkSessionGetSparkSessionOptionalParams,
+  SparkSessionGetSparkSessionResponse,
+  SparkSessionGetSparkStatementsResponse,
+  SparkStatementOptions,
+  SparkSessionCreateSparkStatementResponse,
+  SparkSessionGetSparkStatementResponse,
+  SparkSessionCancelSparkStatementResponse
+} from "../models";
+
+/**
+ * Class representing a SparkSession.
+ */
+export class SparkSession {
+  private readonly client: SparkClient;
+
+  /**
+   * Initializes a new instance of the SparkSession class.
+   * @param client Reference to the service client
+   */
+  constructor(client: SparkClient) {
+    this.client = client;
+  }
+
+  /**
+   * List all spark sessions which are running under a particular spark pool.
+   * @param options The options parameters.
+   */
+  async getSparkSessions(
+    options?: SparkSessionGetSparkSessionsOptionalParams
+  ): Promise<SparkSessionGetSparkSessionsResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-getSparkSessions",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        getSparkSessionsOperationSpec
+      );
+      return result as SparkSessionGetSparkSessionsResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Create new spark session.
+   * @param sparkSessionOptions Livy-compatible session request payload.
+   * @param options The options parameters.
+   */
+  async createSparkSession(
+    sparkSessionOptions: SparkSessionOptions,
+    options?: SparkSessionCreateSparkSessionOptionalParams
+  ): Promise<SparkSessionCreateSparkSessionResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-createSparkSession",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sparkSessionOptions,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        createSparkSessionOperationSpec
+      );
+      return result as SparkSessionCreateSparkSessionResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Gets a single spark session.
+   * @param sessionId Identifier for the session.
+   * @param options The options parameters.
+   */
+  async getSparkSession(
+    sessionId: number,
+    options?: SparkSessionGetSparkSessionOptionalParams
+  ): Promise<SparkSessionGetSparkSessionResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-getSparkSession",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sessionId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        getSparkSessionOperationSpec
+      );
+      return result as SparkSessionGetSparkSessionResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Cancels a running spark session.
+   * @param sessionId Identifier for the session.
+   * @param options The options parameters.
+   */
+  async cancelSparkSession(
+    sessionId: number,
+    options?: coreHttp.OperationOptions
+  ): Promise<coreHttp.RestResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-cancelSparkSession",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sessionId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        cancelSparkSessionOperationSpec
+      );
+      return result as coreHttp.RestResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Sends a keep-alive call to the current session to reset the session timeout.
+   * @param sessionId Identifier for the session.
+   * @param options The options parameters.
+   */
+  async resetSparkSessionTimeout(
+    sessionId: number,
+    options?: coreHttp.OperationOptions
+  ): Promise<coreHttp.RestResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-resetSparkSessionTimeout",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sessionId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        resetSparkSessionTimeoutOperationSpec
+      );
+      return result as coreHttp.RestResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Gets a list of statements within a spark session.
+   * @param sessionId Identifier for the session.
+   * @param options The options parameters.
+   */
+  async getSparkStatements(
+    sessionId: number,
+    options?: coreHttp.OperationOptions
+  ): Promise<SparkSessionGetSparkStatementsResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-getSparkStatements",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sessionId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        getSparkStatementsOperationSpec
+      );
+      return result as SparkSessionGetSparkStatementsResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Create a statement within a spark session.
+   * @param sessionId Identifier for the session.
+   * @param sparkStatementOptions Livy-compatible statement request payload.
+   * @param options The options parameters.
+   */
+  async createSparkStatement(
+    sessionId: number,
+    sparkStatementOptions: SparkStatementOptions,
+    options?: coreHttp.OperationOptions
+  ): Promise<SparkSessionCreateSparkStatementResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-createSparkStatement",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sessionId,
+      sparkStatementOptions,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        createSparkStatementOperationSpec
+      );
+      return result as SparkSessionCreateSparkStatementResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Gets a single statement within a spark session.
+   * @param sessionId Identifier for the session.
+   * @param statementId Identifier for the statement.
+   * @param options The options parameters.
+   */
+  async getSparkStatement(
+    sessionId: number,
+    statementId: number,
+    options?: coreHttp.OperationOptions
+  ): Promise<SparkSessionGetSparkStatementResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-getSparkStatement",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sessionId,
+      statementId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        getSparkStatementOperationSpec
+      );
+      return result as SparkSessionGetSparkStatementResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+
+  /**
+   * Kill a statement within a session.
+   * @param sessionId Identifier for the session.
+   * @param statementId Identifier for the statement.
+   * @param options The options parameters.
+   */
+  async cancelSparkStatement(
+    sessionId: number,
+    statementId: number,
+    options?: coreHttp.OperationOptions
+  ): Promise<SparkSessionCancelSparkStatementResponse> {
+    const { span, updatedOptions } = createSpan(
+      "SparkClient-cancelSparkStatement",
+      coreHttp.operationOptionsToRequestOptionsBase(options || {})
+    );
+    const operationArguments: coreHttp.OperationArguments = {
+      sessionId,
+      statementId,
+      options: updatedOptions
+    };
+    try {
+      const result = await this.client.sendOperationRequest(
+        operationArguments,
+        cancelSparkStatementOperationSpec
+      );
+      return result as SparkSessionCancelSparkStatementResponse;
+    } catch (error) {
+      span.setStatus({
+        code: CanonicalCode.UNKNOWN,
+        message: error.message
+      });
+      throw error;
+    } finally {
+      span.end();
+    }
+  }
+}
+// Operation Specifications
+
+const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
+
+const getSparkSessionsOperationSpec: coreHttp.OperationSpec = {
+  path: "/sessions",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SparkSessionCollection
+    }
+  },
+  queryParameters: [Parameters.fromParam, Parameters.size, Parameters.detailed],
+  urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const createSparkSessionOperationSpec: coreHttp.OperationSpec = {
+  path: "/sessions",
+  httpMethod: "POST",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SparkSession
+    }
+  },
+  requestBody: Parameters.sparkSessionOptions,
+  queryParameters: [Parameters.detailed],
+  urlParameters: [Parameters.endpoint, Parameters.livyApiVersion, Parameters.sparkPoolName],
+  headerParameters: [Parameters.accept, Parameters.contentType],
+  mediaType: "json",
+  serializer
+};
+const getSparkSessionOperationSpec: coreHttp.OperationSpec = {
+  path: "/sessions/{sessionId}",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SparkSession
+    }
+  },
+  queryParameters: [Parameters.detailed],
+  urlParameters: [
+    Parameters.endpoint,
+    Parameters.livyApiVersion,
+    Parameters.sparkPoolName,
+    Parameters.sessionId
+  ],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const cancelSparkSessionOperationSpec: coreHttp.OperationSpec = {
+  path: "/sessions/{sessionId}",
+  httpMethod: "DELETE",
+  responses: { 200: {} },
+  urlParameters: [
+    Parameters.endpoint,
+    Parameters.livyApiVersion,
+    Parameters.sparkPoolName,
+    Parameters.sessionId
+  ],
+  serializer
+};
+const resetSparkSessionTimeoutOperationSpec: coreHttp.OperationSpec = {
+  path: "/sessions/{sessionId}/reset-timeout",
+  httpMethod: "PUT",
+  responses: { 200: {} },
+  urlParameters: [
+    Parameters.endpoint,
+    Parameters.livyApiVersion,
+    Parameters.sparkPoolName,
+    Parameters.sessionId
+  ],
+  serializer
+};
+const getSparkStatementsOperationSpec: coreHttp.OperationSpec = {
+  path: "/sessions/{sessionId}/statements",
+  httpMethod: "GET",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SparkStatementCollection
+    }
+  },
+  urlParameters: [
+    Parameters.endpoint,
+    Parameters.livyApiVersion,
+    Parameters.sparkPoolName,
+    Parameters.sessionId
+  ],
+  headerParameters: [Parameters.accept],
+  serializer
+};
+const createSparkStatementOperationSpec: coreHttp.OperationSpec = {
+  path: "/sessions/{sessionId}/statements",
+  httpMethod: "POST",
+  responses: {
+    200: {
+      bodyMapper: Mappers.SparkStatement
+    }
+  },
+  requestBody: Parameters.sparkStatementOptions,
+  urlParameters: [
+    Parameters.endpoint,
+    Parameters.livyApiVersion,
+    Parameters.sparkPoolName,
+    Parameters.sessionId
+  ],
+  headerParameters:
[Parameters.accept, Parameters.contentType], + mediaType: "json", + serializer +}; +const getSparkStatementOperationSpec: coreHttp.OperationSpec = { + path: "/sessions/{sessionId}/statements/{statementId}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.SparkStatement + } + }, + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.sessionId, + Parameters.statementId + ], + headerParameters: [Parameters.accept], + serializer +}; +const cancelSparkStatementOperationSpec: coreHttp.OperationSpec = { + path: "/sessions/{sessionId}/statements/{statementId}/cancel", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.SparkStatementCancellationResult + } + }, + urlParameters: [ + Parameters.endpoint, + Parameters.livyApiVersion, + Parameters.sparkPoolName, + Parameters.sessionId, + Parameters.statementId + ], + headerParameters: [Parameters.accept], + serializer +}; diff --git a/sdk/synapse/synapse-spark/src/sparkClient.ts b/sdk/synapse/synapse-spark/src/sparkClient.ts new file mode 100644 index 000000000000..e456808dcff2 --- /dev/null +++ b/sdk/synapse/synapse-spark/src/sparkClient.ts @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; +import { SparkBatch, SparkSession } from "./operations"; +import { SparkClientContext } from "./sparkClientContext"; +import { SparkClientOptionalParams } from "./models"; + +export class SparkClient extends SparkClientContext { + /** + * Initializes a new instance of the SparkClient class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param sparkPoolName Name of the spark pool. + * @param options The parameter options + */ + constructor( + credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials, + endpoint: string, + sparkPoolName: string, + options?: SparkClientOptionalParams + ) { + super(credentials, endpoint, sparkPoolName, options); + this.sparkBatch = new SparkBatch(this); + this.sparkSession = new SparkSession(this); + } + + sparkBatch: SparkBatch; + sparkSession: SparkSession; +} diff --git a/sdk/synapse/synapse-spark/src/sparkClientContext.ts b/sdk/synapse/synapse-spark/src/sparkClientContext.ts new file mode 100644 index 000000000000..cb070140f975 --- /dev/null +++ b/sdk/synapse/synapse-spark/src/sparkClientContext.ts @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as coreHttp from "@azure/core-http"; +import { SparkClientOptionalParams } from "./models"; + +const packageName = "@azure/synapse-spark"; +const packageVersion = "1.0.0"; + +export class SparkClientContext extends coreHttp.ServiceClient { + endpoint: string; + livyApiVersion: string; + sparkPoolName: string; + + /** + * Initializes a new instance of the SparkClientContext class. + * @param credentials Subscription credentials which uniquely identify client subscription. + * @param endpoint The workspace development endpoint, for example + * https://myworkspace.dev.azuresynapse.net. + * @param sparkPoolName Name of the spark pool. 
+   * @param options The parameter options
+   */
+  constructor(
+    credentials: coreHttp.TokenCredential | coreHttp.ServiceClientCredentials,
+    endpoint: string,
+    sparkPoolName: string,
+    options?: SparkClientOptionalParams
+  ) {
+    if (credentials === undefined) {
+      throw new Error("'credentials' cannot be undefined");
+    }
+    if (endpoint === undefined) {
+      throw new Error("'endpoint' cannot be undefined");
+    }
+    if (sparkPoolName === undefined) {
+      throw new Error("'sparkPoolName' cannot be undefined");
+    }
+
+    // Initializing default values for options
+    if (!options) {
+      options = {};
+    }
+
+    if (!options.userAgent) {
+      const defaultUserAgent = coreHttp.getDefaultUserAgentValue();
+      options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;
+    }
+
+    if (!options.credentialScopes) {
+      options.credentialScopes = ["https://dev.azuresynapse.net/.default"];
+    }
+
+    super(credentials, options);
+
+    this.requestContentType = "application/json; charset=utf-8";
+
+    this.baseUri =
+      options.endpoint || "{endpoint}/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}";
+
+    // Parameter assignments
+    this.endpoint = endpoint;
+    this.sparkPoolName = sparkPoolName;
+
+    // Assigning values to Constant parameters
+    this.livyApiVersion = options.livyApiVersion || "2019-11-01-preview";
+  }
+}
diff --git a/sdk/synapse/synapse-spark/src/tracing.ts b/sdk/synapse/synapse-spark/src/tracing.ts
new file mode 100644
index 000000000000..08ce4428a831
--- /dev/null
+++ b/sdk/synapse/synapse-spark/src/tracing.ts
@@ -0,0 +1,8 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+import { createSpanFunction } from "@azure/core-http";
+
+export const createSpan = createSpanFunction({
+  namespace: "Azure.Synapse.Spark",
+  packagePrefix: "Microsoft.Synapse"
+});
diff --git a/sdk/synapse/synapse-spark/tsconfig.json b/sdk/synapse/synapse-spark/tsconfig.json
new file mode 100644
index 000000000000..d43efedfc9bc
--- /dev/null
+++ b/sdk/synapse/synapse-spark/tsconfig.json
@@ -0,0 +1,20 @@
+{
+  "compilerOptions": {
+    "module": "es6",
+    "moduleResolution": "node",
+    "strict": true,
+    "target": "es5",
+    "sourceMap": true,
+    "declarationMap": true,
+    "esModuleInterop": true,
+    "allowSyntheticDefaultImports": true,
+    "forceConsistentCasingInFileNames": true,
+    "preserveConstEnums": true,
+    "lib": ["es6", "dom"],
+    "declaration": true,
+    "outDir": "./dist-esm",
+    "importHelpers": true
+  },
+  "include": ["./src/**/*.ts"],
+  "exclude": ["node_modules"]
+}
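
A minimal usage sketch of the client surface added above (untested; the workspace endpoint, pool name, storage path, and class name below are hypothetical placeholders, and `DefaultAzureCredential` from `@azure/identity` is assumed for authentication, though any credential accepted by the constructor would work):

```ts
import { DefaultAzureCredential } from "@azure/identity";
import { SparkClient } from "@azure/synapse-spark";

// Hypothetical workspace and pool names; substitute your own.
const client = new SparkClient(
  new DefaultAzureCredential(),
  "https://myworkspace.dev.azuresynapse.net",
  "mysparkpool"
);

async function main(): Promise<void> {
  // Submit a Livy-compatible batch job. Per the SparkBatchJobOptions
  // mapper, `name` and `file` are the only required fields.
  const job = await client.sparkBatch.createSparkBatchJob({
    name: "wordcount",
    file: "abfss://jobs@myaccount.dfs.core.windows.net/wordcount.jar",
    className: "com.contoso.WordCount",
    executorCount: 2
  });
  console.log(`Batch job ${job.id} is ${job.state}`);

  // Page through existing jobs; the client option `fromParam` is sent as
  // the `from` query parameter, and `size` defaults to (and caps at) 20.
  const jobs = await client.sparkBatch.getSparkBatchJobs({ fromParam: 0, size: 10 });
  console.log(`${jobs.total} job(s) under this pool`);

  // Session statements follow the same pattern, e.g.:
  // const stmt = await client.sparkSession.createSparkStatement(sessionId, {
  //   code: "1 + 1",
  //   kind: "pyspark"
  // });
}

main().catch(console.error);
```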