From b02acd2c3e6f83892f525b97799f71baf8871b5a Mon Sep 17 00:00:00 2001 From: hivyas <61890270+hivyas@users.noreply.github.com> Date: Fri, 29 Oct 2021 15:48:25 -0700 Subject: [PATCH] [AVA] Updated package to 1.1, moved to v2 samples flow, moved to core-rest-pipeline (#18425) * updated package to 1.1 * updating table in readme * fixing product slugs * updating slugs and test * updating slugs * changing slugs * fixing formatting --- common/config/rush/pnpm-lock.yaml | 4 +- package.json | 2 +- .../video-analyzer-edge/CHANGELOG.md | 6 +- .../video-analyzer-edge/README.md | 104 +-- .../video-analyzer-edge/package.json | 24 +- .../review/video-analyzer-edge.api.md | 266 ++++++- .../lvaInvokeModuleSample.ts | 100 ++- .../samples/javascript/README.md | 32 - .../video-analyzer-edge/samples/tsconfig.json | 10 - .../samples/typescript/README.md | 32 - .../samples/typescript/package.json | 37 - .../samples/v1/javascript/README.md | 62 ++ .../javascript/lvaInvokeModuleSample.js} | 94 ++- .../samples/{ => v1}/javascript/package.json | 22 +- .../samples/{ => v1}/javascript/sample.env | 8 +- .../samples/v1/typescript/README.md | 75 ++ .../samples/v1/typescript/package.json | 38 + .../samples/v1/typescript/sample.env | 17 + .../typescript/src/lvaInvokeModuleSample.ts | 207 +++++ .../samples/{ => v1}/typescript/tsconfig.json | 11 +- .../src/generated/models/index.ts | 347 ++++++++- .../src/generated/models/mappers.ts | 710 ++++++++++++++++-- .../src/generated/models/parameters.ts | 2 +- .../video-analyzer-edge/src/index.ts | 81 +- .../video-analyzer-edge/swagger/README.md | 12 +- .../test/payloadCreation.spec.ts | 19 +- .../video-analyzer-edge/tsconfig.json | 16 +- 27 files changed, 1954 insertions(+), 384 deletions(-) rename sdk/videoanalyzer/video-analyzer-edge/{samples/typescript/src => samples-dev}/lvaInvokeModuleSample.ts (59%) delete mode 100644 sdk/videoanalyzer/video-analyzer-edge/samples/javascript/README.md delete mode 100644 sdk/videoanalyzer/video-analyzer-edge/samples/tsconfig.json delete mode 100644 sdk/videoanalyzer/video-analyzer-edge/samples/typescript/README.md delete mode 100644 sdk/videoanalyzer/video-analyzer-edge/samples/typescript/package.json create mode 100644 sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/README.md rename sdk/videoanalyzer/video-analyzer-edge/samples/{javascript/lvaInvokeModule.js => v1/javascript/lvaInvokeModuleSample.js} (59%) rename sdk/videoanalyzer/video-analyzer-edge/samples/{ => v1}/javascript/package.json (56%) rename sdk/videoanalyzer/video-analyzer-edge/samples/{ => v1}/javascript/sample.env (70%) create mode 100644 sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/README.md create mode 100644 sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/package.json create mode 100644 sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/sample.env create mode 100644 sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/src/lvaInvokeModuleSample.ts rename sdk/videoanalyzer/video-analyzer-edge/samples/{ => v1}/typescript/tsconfig.json (65%) diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index 2b7484ac5575..bfbca14b96c7 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -13545,7 +13545,7 @@ packages: dev: false file:projects/video-analyzer-edge.tgz: - resolution: {integrity: sha512-Cl4jjmfkJuO8NZ72KUrRa1bQlcQGLYRpJcCwlmQY3LIhRTDAz0eJYbSiAjzq0S15LzmCw/2mzVoMK6kP+vgNvQ==, tarball: file:projects/video-analyzer-edge.tgz} + resolution: {integrity: 
sha512-9+t+mAcyAqPYLyp4a41bdqOppIV2die1AY4A2bP/szdpcSfp+AA/ZQc2kyfHWBB5fVXlfHI75WYq687WY8qxcQ==, tarball: file:projects/video-analyzer-edge.tgz} name: '@rush-temp/video-analyzer-edge' version: 0.0.0 dependencies: @@ -13554,7 +13554,7 @@ packages: '@types/chai': 4.2.22 '@types/chai-as-promised': 7.1.4 '@types/mocha': 7.0.2 - '@types/node': 12.20.34 + '@types/node': 12.20.33 azure-iothub: 1.14.6 chai: 4.3.4 chai-as-promised: 7.1.1_chai@4.3.4 diff --git a/package.json b/package.json index 69f616ed127b..65b9a546e17a 100644 --- a/package.json +++ b/package.json @@ -55,7 +55,7 @@ "prettier": "^1.16.4", "ts-node": "^7.0.1", "tslib": "1.11.2", - "typescript": "^3.2.2", + "typescript": "^3.9.10", "yargs": "^11.0.0" }, "engines": { diff --git a/sdk/videoanalyzer/video-analyzer-edge/CHANGELOG.md b/sdk/videoanalyzer/video-analyzer-edge/CHANGELOG.md index 2a87d16522a3..ac5c14745132 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/CHANGELOG.md +++ b/sdk/videoanalyzer/video-analyzer-edge/CHANGELOG.md @@ -1,7 +1,9 @@ # Release History +## 1.0.0-beta.3 (2021-11-01) -## 1.0.0-beta.3 (Unreleased) - +- Added device discovery and device detail request for ONVIF enabled devices. +- Added Remote Device Adapter configuration for ingesting video in a private network +- Added retention policy to VideoSink ## 1.0.0-beta.2 (2021-05-28) diff --git a/sdk/videoanalyzer/video-analyzer-edge/README.md b/sdk/videoanalyzer/video-analyzer-edge/README.md index 7df76d98bcaa..ba9d779adafa 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/README.md +++ b/sdk/videoanalyzer/video-analyzer-edge/README.md @@ -1,6 +1,6 @@ # Azure Video Analyzer Edge client library for JavaScript -Azure Video Analyzer provides a platform to build intelligent video applications that span the edge and the cloud. The platform offers the capability to capture, record, and analyze live video along with publishing the results, video and video analytics, to Azure services in the cloud or the edge. It is designed to be an extensible platform, enabling you to connect different video analysis edge modules such as Cognitive services containers, custom edge modules built by you with open source machine learning models or custom models trained with your own data. You can then use them to analyze live video without worrying about the complexity of building and running a live video pipeline. +Azure Video Analyzer is an [Azure Applied AI Service][applied-ai-service] that provides a platform for you to build intelligent video applications that can span both edge and cloud infrastructures. The platform offers the capability to capture, record, and analyze live video along with publishing the results, video and video analytics, to Azure services at the edge or in the cloud. It is designed to be an extensible platform, enabling you to connect different video inferencing edge modules such as Cognitive services modules, or custom inferencing modules that have been trained with your own data using either open-source machine learning or [Azure Machine Learning][machine-learning]. Use the client library for Video Analyzer Edge to: @@ -31,7 +31,9 @@ npm install @azure/video-analyzer-edge | SDK | Video Analyzer edge module | | ------------ | -------------------------- | - | 1.0.0-beta.x | 1.0 | + | 1.0.0-beta.3 | 1.1 | + | 1.0.0-beta.2 | 1.0 | + | 1.0.0-beta.1 | 1.0 | ### Creating a pipeline topology and making requests @@ -53,44 +55,47 @@ To create a pipeline topology you need to define sources and sinks. 
```typescript const rtspSource: RtspSource = { - name: "rtspSource", - endpoint: { - url: "${rtspUrl}", - "@type": "#Microsoft.VideoAnalyzer.UnsecuredEndpoint", - credentials: { - username: "${rtspUserName}", - password: "${rtspPassword}", - "@type": "#Microsoft.VideoAnalyzer.UsernamePasswordCredentials" - } - } as UnsecuredEndpoint, - "@type": "#Microsoft.VideoAnalyzer.RtspSource" - }; - - const nodeInput: NodeInput = { - nodeName: "rtspSource" - }; - - const msgSink: IotHubMessageSink = { - name: "msgSink", - inputs: [nodeInput], - hubOutputName: "${hubSinkOutputName}", - "@type": "#Microsoft.VideoAnalyzer.IotHubMessageSink" - }; - - const pipelineTopology: PipelineTopology = { - name: "jsTestTopology", - properties: { - description: "Continuous video recording to a Video Analyzer video", - parameters: [ - { name: "rtspUserName", type: "String", default: "dummyUsername" }, - { name: "rtspPassword", type: "SecretString", default: "dummyPassword" }, - { name: "rtspUrl", type: "String" } - { name: "hubSinkOutputName", type: "String" } - ], - sources: [rtspSource], - sinks: [msgSink] + //Create a source for your pipeline topology + name: "rtspSource", + endpoint: { + url: "${rtspUrl}", + "@type": "#Microsoft.VideoAnalyzer.UnsecuredEndpoint", + credentials: { + username: "${rtspUserName}", + password: "${rtspPassword}", + "@type": "#Microsoft.VideoAnalyzer.UsernamePasswordCredentials" } - }; + } as UnsecuredEndpoint, + "@type": "#Microsoft.VideoAnalyzer.RtspSource" +}; + +const nodeInput: NodeInput = { + //Create an input for your sink + nodeName: "rtspSource" +}; + +const videoSink: VideoSink = { + name: "videoSink", + inputs: [nodeInput], + videoName: "video", + localMediaCachePath: "/var/lib/videoanalyzer/tmp/", + localMediaCacheMaximumSizeMiB: "1024", + "@type": "#Microsoft.VideoAnalyzer.VideoSink" +} + +const pipelineTopology: PipelineTopology = { + name: "jsTestTopology", + properties: { + description: "description for jsTestTopology", + parameters: [ + { name: "rtspUserName", type: "String", default: "testUsername" }, + { name: "rtspPassword", type: "SecretString", default: "testPassword" }, + { name: "rtspUrl", type: "String" }, + ], + sources: [rtspSource], + sinks: [videoSink] + } +}; ``` @@ -100,10 +105,10 @@ To create a live pipeline instance, you need to have an existing pipeline topolo ```typescript const livePipeline: LivePipeline = { - name: pipelineTopologyName, + name: "jsLivePipelineTest", properties: { - description: "Continuous video recording to a Video Analyzer video", - topologyName: "jsTestTopology", + description: "description", + topologyName: pipelineTopologyName, parameters: [{ name: "rtspUrl", value: "rtsp://sample.com" }] } }; @@ -117,15 +122,16 @@ To invoke a direct method on your device you need to first define the request us import { createRequest } from "@azure/video-analyzer-edge"; import { Client } from "azure-iothub"; -const deviceId = "lva-sample-device"; -const moduleId = "mediaEdge"; -const connectionString = "connectionString"; -const iotHubClient = Client.fromConnectionString(connectionString); +const deviceId = process.env.iothub_deviceid; +const moduleId = process.env.iothub_moduleid; +const connectionString = process.env.iothub_connectionstring; +const iotHubClient = Client.fromConnectionString(connectionString); //Connect to your IoT Hub + const pipelineTopologySetRequest = createRequest("pipelineTopologySet", pipelineTopology); const setPipelineTopResponse = await iotHubClient.invokeDeviceMethod(deviceId, moduleId, { - methodName: 
pipelineTopologySetRequest.methodName, - payload: pipelineTopologySetRequest.payload -}); + methodName: pipelineTopologySetRequest.methodName, + payload: pipelineTopologySetRequest.payload + }); ``` ## Troubleshooting diff --git a/sdk/videoanalyzer/video-analyzer-edge/package.json b/sdk/videoanalyzer/video-analyzer-edge/package.json index 950aa9c1192b..b16c876b1c01 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/package.json +++ b/sdk/videoanalyzer/video-analyzer-edge/package.json @@ -11,14 +11,14 @@ "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", "build:browser": "tsc -p . && cross-env ONLY_BROWSER=true rollup -c 2>&1", "build:node": "tsc -p . && cross-env ONLY_NODE=true rollup -c 2>&1", - "build:samples": "dev-tool samples prep && cd dist-samples && tsc -p .", + "build:samples": "echo Obsolete.", "build:test": "tsc -p . && rollup -c 2>&1", "build": "npm run clean && tsc -p . && rollup -c 2>&1 && api-extractor run --local", - "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", "clean": "rimraf dist dist-* test-dist temp types *.tgz *.log", - "execute:samples": "npm run build:samples && dev-tool samples run dist-samples/javascript dist-samples/typescript/dist/dist-samples/typescript/src/", + "execute:samples": "dev-tool samples run samples-dev", "extract-api": "tsc -p . && api-extractor run --local", - "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", "integration-test:browser": "karma start --single-run", "integration-test:node": "nyc mocha -r esm --require source-map-support/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 5000000 --full-trace \"dist-esm/test/{,!(browser)/**/}/*.spec.js\"", "integration-test": "npm run integration-test:node && npm run integration-test:browser", @@ -59,11 +59,12 @@ "prettier": "@azure/eslint-plugin-azure-sdk/prettier.json", "dependencies": { "@azure/core-auth": "^1.3.0", - "@azure/core-http": "^2.0.0", "@azure/core-tracing": "1.0.0-preview.13", "@azure/logger": "^1.0.0", "events": "^3.0.0", - "tslib": "^2.2.0" + "tslib": "^2.2.0", + "@azure/core-client": "^1.0.0", + "@azure/core-rest-pipeline": "^1.1.0" }, "devDependencies": { "@azure/dev-tool": "^1.0.0", @@ -99,9 +100,18 @@ "typescript": "~4.2.0", "util": "^0.12.1", "typedoc": "0.15.2", - "azure-iothub": "^1.13.1" + "azure-iothub": "^1.14.6" }, "//smokeTestConfiguration": { "skipFolder": true + }, + "//sampleConfiguration": { + "productName": "Azure Video Analyzer", + "productSlugs": [ + "azure" + ], + "requiredResources": { + "Azure IoT Hub account": "https://docs.microsoft.com/azure/iot-hub/iot-hub-create-through-portal" + } } } diff --git a/sdk/videoanalyzer/video-analyzer-edge/review/video-analyzer-edge.api.md b/sdk/videoanalyzer/video-analyzer-edge/review/video-analyzer-edge.api.md index 03fff73ed557..4769a977d67e 100644 --- 
a/sdk/videoanalyzer/video-analyzer-edge/review/video-analyzer-edge.api.md +++ b/sdk/videoanalyzer/video-analyzer-edge/review/video-analyzer-edge.api.md @@ -4,7 +4,7 @@ ```ts -import * as coreHttp from '@azure/core-http'; +import * as coreClient from '@azure/core-client'; // @public export interface CertificateSource { @@ -24,42 +24,73 @@ export type CognitiveServicesVisionProcessor = ProcessorNodeBase & { }; // @public -export function createRequest(request: "pipelineTopologySet", payload: PipelineTopology): Request_2; +export function createRequest(request: "pipelineTopologySet", payload: PipelineTopology): Request; // @public -export function createRequest(request: "pipelineTopologyGet", payload: string): Request_2; +export function createRequest(request: "pipelineTopologyGet", payload: string): Request; // @public -export function createRequest(request: "pipelineTopologyList"): Request_2; +export function createRequest(request: "pipelineTopologyList"): Request; // @public -export function createRequest(request: "pipelineTopologyDelete", payload: string): Request_2; +export function createRequest(request: "pipelineTopologyDelete", payload: string): Request; // @public -export function createRequest(request: "livePipelineSet", payload: LivePipeline): Request_2; +export function createRequest(request: "livePipelineSet", payload: LivePipeline): Request; // @public -export function createRequest(request: "livePipelineGet", payload: string): Request_2; +export function createRequest(request: "livePipelineGet", payload: string): Request; // @public -export function createRequest(request: "livePipelineList"): Request_2; +export function createRequest(request: "livePipelineList"): Request; // @public -export function createRequest(request: "livePipelineDelete", payload: string): Request_2; +export function createRequest(request: "livePipelineDelete", payload: string): Request; // @public -export function createRequest(request: "livePipelineActivate", payload: string): Request_2; +export function createRequest(request: "livePipelineActivate", payload: string): Request; // @public -export function createRequest(request: "livePipelineDeactivate", payload: string): Request_2; +export function createRequest(request: "livePipelineDeactivate", payload: string): Request; + +// @public +export function createRequest(request: "onvifDeviceDiscover"): Request; + +// @public +export function createRequest(request: "onvifDeviceGet", payload: UnsecuredEndpoint): Request; + +// @public +export function createRequest(request: "remoteDeviceAdapterSet", payload: RemoteDeviceAdapter): Request; + +// @public +export function createRequest(request: "remoteDeviceAdapterList"): Request; + +// @public +export function createRequest(request: "remoteDeviceAdapterGet", payload: string): Request; + +// @public +export function createRequest(request: "remoteDeviceAdapterDelete", payload: string): Request; // @public export interface CredentialsBase { - "@type": "#Microsoft.VideoAnalyzer.UsernamePasswordCredentials" | "#Microsoft.VideoAnalyzer.HttpHeaderCredentials"; + "@type": "#Microsoft.VideoAnalyzer.UsernamePasswordCredentials" | "#Microsoft.VideoAnalyzer.HttpHeaderCredentials" | "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials"; } // @public (undocumented) -export type CredentialsBaseUnion = CredentialsBase | UsernamePasswordCredentials | HttpHeaderCredentials; +export type CredentialsBaseUnion = CredentialsBase | UsernamePasswordCredentials | HttpHeaderCredentials | SymmetricKeyCredentials; + +// @public +export 
interface DiscoveredOnvifDevice { + endpoints?: string[]; + remoteIPAddress?: string; + scopes?: string[]; + serviceIdentifier?: string; +} + +// @public +export interface DiscoveredOnvifDeviceCollection { + value?: DiscoveredOnvifDevice[]; +} // @public export interface EndpointBase { @@ -73,7 +104,7 @@ export type EndpointBaseUnion = EndpointBase | UnsecuredEndpoint | TlsEndpoint; // @public export type ExtensionProcessorBase = ProcessorNodeBase & { - "@type": "#Microsoft.VideoAnalyzer.ExtensionProcessorBase"; + "@type": "#Microsoft.VideoAnalyzer.ExtensionProcessorBase" | "#Microsoft.VideoAnalyzer.GrpcExtension" | "#Microsoft.VideoAnalyzer.HttpExtension"; endpoint: EndpointBaseUnion; image: ImageProperties; samplingOptions?: SamplingOptions; @@ -91,7 +122,7 @@ export type FileSink = SinkNodeBase & { }; // @public -export interface GeneratedClientOptionalParams extends coreHttp.ServiceClientOptions { +export interface GeneratedClientOptionalParams extends coreClient.ServiceClientOptions { endpoint?: string; } @@ -111,6 +142,15 @@ export interface GrpcExtensionDataTransfer { // @public export type GrpcExtensionDataTransferMode = string; +// @public +export interface H264Configuration { + govLength?: number; + profile?: H264Profile; +} + +// @public +export type H264Profile = string; + // @public export type HttpExtension = ExtensionProcessorBase & { "@type": "#Microsoft.VideoAnalyzer.HttpExtension"; @@ -172,6 +212,12 @@ export interface ImageScale { // @public export type ImageScaleMode = string; +// @public +export interface IotHubDeviceConnection { + credentials?: CredentialsBaseUnion; + deviceId: string; +} + // @public export type IotHubMessageSink = SinkNodeBase & { "@type": "#Microsoft.VideoAnalyzer.IotHubMessageSink"; @@ -185,13 +231,25 @@ export type IotHubMessageSource = SourceNodeBase & { }; // @public -export const enum KnownGrpcExtensionDataTransferMode { +export enum KnownGrpcExtensionDataTransferMode { Embedded = "embedded", SharedMemory = "sharedMemory" } // @public -export const enum KnownImageFormatRawPixelFormat { +export enum KnownH264Profile { + // (undocumented) + Baseline = "Baseline", + // (undocumented) + Extended = "Extended", + // (undocumented) + High = "High", + // (undocumented) + Main = "Main" +} + +// @public +export enum KnownImageFormatRawPixelFormat { Abgr = "abgr", Argb = "argb", Bgr24 = "bgr24", @@ -206,14 +264,14 @@ export const enum KnownImageFormatRawPixelFormat { } // @public -export const enum KnownImageScaleMode { +export enum KnownImageScaleMode { Pad = "pad", PreserveAspectRatio = "preserveAspectRatio", Stretch = "stretch" } // @public -export const enum KnownLivePipelineState { +export enum KnownLivePipelineState { Activating = "activating", Active = "active", Deactivating = "deactivating", @@ -221,32 +279,46 @@ export const enum KnownLivePipelineState { } // @public -export const enum KnownMotionDetectionSensitivity { +export enum KnownMotionDetectionSensitivity { High = "high", Low = "low", Medium = "medium" } // @public -export const enum KnownObjectTrackingAccuracy { +export enum KnownMpeg4Profile { + ASP = "ASP", + SP = "SP" +} + +// @public +export enum KnownObjectTrackingAccuracy { High = "high", Low = "low", Medium = "medium" } // @public -export const enum KnownOutputSelectorOperator { +export enum KnownOnvifSystemDateTimeType { + // (undocumented) + Manual = "Manual", + // (undocumented) + Ntp = "Ntp" +} + +// @public +export enum KnownOutputSelectorOperator { Is = "is", IsNot = "isNot" } // @public -export const enum 
KnownOutputSelectorProperty { +export enum KnownOutputSelectorProperty { MediaType = "mediaType" } // @public -export const enum KnownParameterType { +export enum KnownParameterType { Bool = "bool", Double = "double", Int = "int", @@ -255,36 +327,43 @@ export const enum KnownParameterType { } // @public -export const enum KnownRtspTransport { +export enum KnownRtspTransport { Http = "http", Tcp = "tcp" } // @public -export const enum KnownSpatialAnalysisOperationFocus { +export enum KnownSpatialAnalysisOperationFocus { BottomCenter = "bottomCenter", Center = "center", Footprint = "footprint" } // @public -export const enum KnownSpatialAnalysisPersonCountEventTrigger { +export enum KnownSpatialAnalysisPersonCountEventTrigger { Event = "event", Interval = "interval" } // @public -export const enum KnownSpatialAnalysisPersonDistanceEventTrigger { +export enum KnownSpatialAnalysisPersonDistanceEventTrigger { Event = "event", Interval = "interval" } // @public -export const enum KnownSpatialAnalysisPersonZoneCrossingEventType { +export enum KnownSpatialAnalysisPersonZoneCrossingEventType { ZoneCrossing = "zoneCrossing", ZoneDwellTime = "zoneDwellTime" } +// @public +export enum KnownVideoEncoding { + H264 = "H264", + Jpeg = "JPEG", + Mpeg4 = "MPEG4" +} + // @public export type LineCrossingProcessor = ProcessorNodeBase & { "@type": "#Microsoft.VideoAnalyzer.LineCrossingProcessor"; @@ -315,9 +394,21 @@ export interface LivePipelineProperties { // @public export type LivePipelineState = string; +// @public +export interface MediaProfile { + mediaUri?: Record; + name?: string; + videoEncoderConfiguration?: VideoEncoderConfiguration; +} + +// @public +export interface MediaUri { + uri?: string; +} + // @public export interface MethodRequest { - apiVersion?: "1.0"; + apiVersion?: "1.1"; methodName: "undefined"; } @@ -332,6 +423,15 @@ export type MotionDetectionProcessor = ProcessorNodeBase & { // @public export type MotionDetectionSensitivity = string; +// @public +export interface Mpeg4Configuration { + govLength?: number; + profile?: Mpeg4Profile; +} + +// @public +export type Mpeg4Profile = string; + // @public export interface NamedLineBase { "@type": "#Microsoft.VideoAnalyzer.NamedLineString"; @@ -382,6 +482,37 @@ export type ObjectTrackingProcessor = ProcessorNodeBase & { accuracy?: ObjectTrackingAccuracy; }; +// @public +export interface OnvifDevice { + dns?: OnvifDns; + hostname?: OnvifHostName; + mediaProfiles?: MediaProfile[]; + systemDateTime?: OnvifSystemDateTime; +} + +// @public +export interface OnvifDns { + fromDhcp?: boolean; + ipv4Address?: string[]; + ipv6Address?: string[]; +} + +// @public +export interface OnvifHostName { + fromDhcp?: boolean; + hostname?: string; +} + +// @public +export interface OnvifSystemDateTime { + time?: string; + timeZone?: string; + type?: OnvifSystemDateTimeType; +} + +// @public +export type OnvifSystemDateTimeType = string; + // @public export interface OutputSelector { operator?: OutputSelectorOperator; @@ -451,16 +582,48 @@ export interface ProcessorNodeBase { export type ProcessorNodeBaseUnion = ProcessorNodeBase | MotionDetectionProcessor | ObjectTrackingProcessor | LineCrossingProcessor | ExtensionProcessorBaseUnion | SignalGateProcessor | CognitiveServicesVisionProcessor; // @public -interface Request_2> { +export interface RateControl { + bitRateLimit?: number; + encodingInterval?: number; + frameRateLimit?: number; + guaranteedFrameRate?: boolean; +} + +// @public +export interface RemoteDeviceAdapter { + name: string; + properties?: 
RemoteDeviceAdapterProperties; + systemData?: SystemData; +} + +// @public +export interface RemoteDeviceAdapterCollection { + continuationToken?: string; + value?: RemoteDeviceAdapter[]; +} + +// @public +export interface RemoteDeviceAdapterProperties { + description?: string; + iotHubDeviceConnection: IotHubDeviceConnection; + target: RemoteDeviceAdapterTarget; +} + +// @public +export interface RemoteDeviceAdapterTarget { + host: string; +} + +// @public +export interface Request> { methodName: string; payload: T & { "@apiVersion": string; }; } -export { Request_2 as Request } // @public -export type RequestType = "pipelineTopologySet" | "pipelineTopologyGet" | "pipelineTopologyList" | "pipelineTopologyDelete" | "livePipelineSet" | "livePipelineGet" | "livePipelineList" | "livePipelineDelete" | "livePipelineActivate" | "livePipelineDeactivate"; +export type RequestType = "pipelineTopologySet" | "pipelineTopologyGet" | "pipelineTopologyList" | "pipelineTopologyDelete" | "livePipelineSet" | "livePipelineGet" | "livePipelineList" | "livePipelineDelete" | "livePipelineActivate" | "livePipelineDeactivate" | "onvifDeviceDiscover" | "onvifDeviceGet" | "remoteDeviceAdapterSet" | "remoteDeviceAdapterList" | "remoteDeviceAdapterGet" | "remoteDeviceAdapterDelete"; // @public export type RtspSource = SourceNodeBase & { @@ -610,16 +773,25 @@ export interface SpatialAnalysisPersonZoneCrossingZoneEvents { // @public export type SpatialAnalysisTypedOperationBase = SpatialAnalysisOperationBase & { - "@type": "SpatialAnalysisTypedOperationBase"; + "@type": "SpatialAnalysisTypedOperationBase" | "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonCountOperation" | "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonZoneCrossingOperation" | "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonDistanceOperation" | "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonLineCrossingOperation"; debug?: string; + calibrationConfiguration?: string; cameraConfiguration?: string; + cameraCalibratorNodeConfiguration?: string; detectorNodeConfiguration?: string; + trackerNodeConfiguration?: string; enableFaceMaskClassifier?: string; }; // @public (undocumented) export type SpatialAnalysisTypedOperationBaseUnion = SpatialAnalysisTypedOperationBase | SpatialAnalysisPersonCountOperation | SpatialAnalysisPersonZoneCrossingOperation | SpatialAnalysisPersonDistanceOperation | SpatialAnalysisPersonLineCrossingOperation; +// @public +export type SymmetricKeyCredentials = CredentialsBase & { + "@type": "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials"; + key: string; +}; + // @public export interface SystemData { createdAt?: Date; @@ -654,15 +826,41 @@ export type UsernamePasswordCredentials = CredentialsBase & { // @public export interface VideoCreationProperties { description?: string; + retentionPeriod?: string; segmentLength?: string; title?: string; } +// @public +export interface VideoEncoderConfiguration { + encoding?: VideoEncoding; + h264?: H264Configuration; + mpeg4?: Mpeg4Configuration; + quality?: number; + rateControl?: RateControl; + resolution?: VideoResolution; +} + +// @public +export type VideoEncoding = string; + +// @public +export interface VideoPublishingOptions { + enableVideoPreviewImage?: string; +} + +// @public +export interface VideoResolution { + height?: number; + width?: number; +} + // @public export type VideoSink = SinkNodeBase & { "@type": "#Microsoft.VideoAnalyzer.VideoSink"; videoName: string; videoCreationProperties?: VideoCreationProperties; + videoPublishingOptions?: VideoPublishingOptions; 
localMediaCachePath: string; localMediaCacheMaximumSizeMiB: string; }; diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/src/lvaInvokeModuleSample.ts b/sdk/videoanalyzer/video-analyzer-edge/samples-dev/lvaInvokeModuleSample.ts similarity index 59% rename from sdk/videoanalyzer/video-analyzer-edge/samples/typescript/src/lvaInvokeModuleSample.ts rename to sdk/videoanalyzer/video-analyzer-edge/samples-dev/lvaInvokeModuleSample.ts index 4759fc58976e..99cc76a05702 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/src/lvaInvokeModuleSample.ts +++ b/sdk/videoanalyzer/video-analyzer-edge/samples-dev/lvaInvokeModuleSample.ts @@ -1,15 +1,21 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. +/** + * @summary Demonstrates the use of a Azure Video Analyzer Edge sdk. + */ + import { PipelineTopology, - Request, RtspSource, UnsecuredEndpoint, NodeInput, LivePipeline, + Request, createRequest, - IotHubMessageSink + RemoteDeviceAdapterProperties, + RemoteDeviceAdapter, + VideoSink } from "@azure/video-analyzer-edge"; import { Client } from "azure-iothub"; @@ -35,12 +41,13 @@ function buildPipelineTopology() { nodeName: "rtspSource" }; - const msgSink: IotHubMessageSink = { - //Create a sink for your pipeline topology - name: "msgSink", + const videoSink: VideoSink = { + name: "videoSink", inputs: [nodeInput], - hubOutputName: "${hubSinkOutputName}", - "@type": "#Microsoft.VideoAnalyzer.IotHubMessageSink" + videoName: "video", + localMediaCachePath: "/var/lib/videoanalyzer/tmp/", + localMediaCacheMaximumSizeMiB: "1024", + "@type": "#Microsoft.VideoAnalyzer.VideoSink" }; const pipelineTopology: PipelineTopology = { @@ -50,11 +57,10 @@ function buildPipelineTopology() { parameters: [ { name: "rtspUserName", type: "String", default: "testUsername" }, { name: "rtspPassword", type: "SecretString", default: "testPassword" }, - { name: "rtspUrl", type: "String" }, - { name: "hubSinkOutputName", type: "String" } + { name: "rtspUrl", type: "String" } ], sources: [rtspSource], - sinks: [msgSink] + sinks: [videoSink] } }; @@ -74,15 +80,35 @@ function buildLivePipeline(pipelineTopologyName: string) { return livePipeline; } +function createRemoteDeviceAdapter(deviceName: string, iotDeviceName: string): RemoteDeviceAdapter { + const remoteDeviceProperties: RemoteDeviceAdapterProperties = { + target: { host: "camerasimulator" }, + iotHubDeviceConnection: { + deviceId: iotDeviceName, + credentials: { + "@type": "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials", + key: process.env.iothub_deviceprimarykey + } + } + }; + + const remoteDeviceAdapter: RemoteDeviceAdapter = { + name: deviceName, + properties: remoteDeviceProperties + }; + + return remoteDeviceAdapter; +} + export async function main() { - const deviceId = "lva-sample-device"; - const moduleId = "mediaEdge"; - const connectionString = "connectionString"; - const iotHubClient = Client.fromConnectionString(connectionString); //Connect to your IoT Hub + const deviceId = process.env.iothub_deviceid; + const moduleId = process.env.iothub_moduleid; + const connectionString = process.env.iothub_connectionstring; + const iotHubClient = Client.fromConnectionString(connectionString ?? ""); //Connect to your IoT Hub async function invokeMethodHelper(methodRequest: Request) { //Helper method to send a module method request to your IoT Hub device - return await iotHubClient.invokeDeviceMethod(deviceId, moduleId, { + return await iotHubClient.invokeDeviceMethod(deviceId ?? "", moduleId ?? 
"", { methodName: methodRequest.methodName, payload: methodRequest.payload }); @@ -130,6 +156,50 @@ export async function main() { const deletePipelineTopRequest = createRequest("pipelineTopologyDelete", pipelineTopology.name); const deletePipelineTopResponse = await invokeMethodHelper(deletePipelineTopRequest); console.log(deletePipelineTopResponse); + + const endpoint: UnsecuredEndpoint = { + url: "http://camerasimulator:8554", + "@type": "#Microsoft.VideoAnalyzer.UnsecuredEndpoint" + }; + const getOnvifDeviceRequest = createRequest("onvifDeviceGet", endpoint); + const getOnvifDeviceResponse = await invokeMethodHelper(getOnvifDeviceRequest); + console.log(getOnvifDeviceResponse); + + const listOnvifDeviceRequest = createRequest("onvifDeviceDiscover"); + const listOnvifDeviceResponse = await invokeMethodHelper(listOnvifDeviceRequest); + console.log(listOnvifDeviceResponse); + + const remoteDeviceAdapter = await createRemoteDeviceAdapter( + "remoteDeviceAdapterSample", + "iotDeviceNameSample" + ); + console.log(remoteDeviceAdapter); + const setRemoteDeviceAdapterRequest = createRequest( + "remoteDeviceAdapterSet", + remoteDeviceAdapter + ); + const setRemoteDeviceAdapterResponse = await invokeMethodHelper(setRemoteDeviceAdapterRequest); + console.log(setRemoteDeviceAdapterResponse); + + const getRemoteDeviceAdapterRequest = createRequest( + "remoteDeviceAdapterGet", + remoteDeviceAdapter.name + ); + const getRemoteDeviceAdapterResponse = await invokeMethodHelper(getRemoteDeviceAdapterRequest); + console.log(getRemoteDeviceAdapterResponse); + + const listRemoteDeviceAdapterRequest = createRequest("remoteDeviceAdapterList"); + const listRemoteDeviceAdapterResponse = await invokeMethodHelper(listRemoteDeviceAdapterRequest); + console.log(listRemoteDeviceAdapterResponse); + + const deleteRemoteDeviceAdapterRequest = createRequest( + "remoteDeviceAdapterDelete", + remoteDeviceAdapter.name + ); + const deleteRemoteDeviceAdapterResponse = await invokeMethodHelper( + deleteRemoteDeviceAdapterRequest + ); + console.log(deleteRemoteDeviceAdapterResponse); } main().catch((err) => { diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/javascript/README.md b/sdk/videoanalyzer/video-analyzer-edge/samples/javascript/README.md deleted file mode 100644 index 375fc9b20462..000000000000 --- a/sdk/videoanalyzer/video-analyzer-edge/samples/javascript/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# Azure Video Analyzer Edge client library for JavaScript - -## Getting started - -### Install the package - -Install the Video Analyzer client library for Typescript with npm: - -```bash -npm install @azure/video-analyzer-edge -``` - -### Prerequisites - -- TypeScript v3.6. -- You need an active [Azure subscription][azure_sub], and a [IoT device connection string][iot_device_connection_string] to use this package. -- To interact with Azure IoT Hub you will need to run `npm install azure-iothub` -- You will need to use the version of the SDK that corresponds to the version of the Video Analyzer Edge module you are using. - - | SDK | Video Analyzer Edge Module | - | ------- | -------------------------- | - | 1.0.0-beta.1 | 1.0 | - -### Running the sample - -Replace the variables `connectionString`, `deviceId`, and `moduleId` with your respective values. You can find these values from your Azure IoT hub. You should then be able to run the sample and send requests to your IoT hub. 
- - - -[azure_sub]: https://azure.microsoft.com/free/ - -[iot_device_connection_string]: TODO://link-to-published-package diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/tsconfig.json b/sdk/videoanalyzer/video-analyzer-edge/samples/tsconfig.json deleted file mode 100644 index 8c89eac7173a..000000000000 --- a/sdk/videoanalyzer/video-analyzer-edge/samples/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../tsconfig.json", - "compilerOptions": { - "module": "commonjs", - "outDir": "typescript/dist", - "lib": ["DOM", "ES6"] - }, - "include": ["typescript/src/**.ts"], - "exclude": ["typescript/*.json", "**/node_modules/", "../node_modules", "../typings"] -} diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/README.md b/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/README.md deleted file mode 100644 index ae3201110d97..000000000000 --- a/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# Azure Video Analyzer Edge client library for TypeScript - -## Getting started - -### Install the package - -Install the Video Analyzer client library for Typescript with npm: - -```bash -npm install @azure/video-analyzer-edge -``` - -### Prerequisites - -- TypeScript v3.6. -- You need an active [Azure subscription][azure_sub], and a [IoT device connection string][iot_device_connection_string] to use this package. -- To interact with Azure IoT Hub you will need to run `npm install azure-iothub` -- You will need to use the version of the SDK that corresponds to the version of the Video Analyzer Edge module you are using. - - | SDK | Video Analyzer Edge Module | - | ------- | -------------------------- | - | 1.0.0-beta.1 | 1.0 | - -### Running the sample - -Replace the variables `connectionString`, `deviceId`, and `moduleId` with your respective values. You can find these values from your Azure IoT hub. You should then be able to run the sample and send requests to your IoT hub. 
- - - -[azure_sub]: https://azure.microsoft.com/free/ - -[iot_device_connection_string]: TODO://link-to-published-package diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/package.json b/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/package.json deleted file mode 100644 index 1d2bcbdb8046..000000000000 --- a/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@azure/video-analyzer-edge", - "private": true, - "version": "0.1.0", - "description": "Azure Video Analyzer library for JavaScript", - "engine": { - "node": ">=12.0.0" - }, - "scripts": { - "build": "tsc", - "prebuild": "rimraf dist/" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/Azure/azure-sdk-for-js.git" - }, - "keywords": [ - "Azure", - "Node.js", - "TypeScript" - ], - "author": "Microsoft Corporation", - "license": "MIT", - "bugs": { - "url": "https://github.com/Azure/azure-sdk-for-js/issues" - }, - "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/template/template", - "sideEffects": false, - "dependencies": { - "dotenv": "^8.2.0" - }, - "devDependencies": { - "@types/node": "^12.0.0", - "rimraf": "^3.0.0", - "typescript": "~3.6.4" - } -} diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/README.md b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/README.md new file mode 100644 index 000000000000..151a7964a29e --- /dev/null +++ b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/README.md @@ -0,0 +1,62 @@ +--- +page_type: sample +languages: + - javascript +products: + - azure +urlFragment: video-analyzer-edge-javascript +--- + +# Azure Video Analyzer client library samples for JavaScript + +These sample programs show how to use the JavaScript client libraries for Azure Video Analyzer in some common scenarios. + +| **File Name** | **Description** | +| ------------------------------------------------- | -------------------------------------------------------- | +| [lvaInvokeModuleSample.js][lvainvokemodulesample] | Demonstrates the use of a Azure Video Analyzer Edge sdk. | + +## Prerequisites + +The sample programs are compatible with [LTS versions of Node.js](https://nodejs.org/about/releases/). + +You need [an Azure subscription][freesub] and the following Azure resources to run these sample programs: + +- [Azure IoT Hub account][createinstance_azureiothubaccount] + +Samples retrieve credentials to access the service endpoint from environment variables. Alternatively, edit the source code to include the appropriate credentials. See each individual sample for details on which environment variables/credentials it requires to function. + +Adapting the samples to run in the browser may require some additional consideration. For details, please see the [package README][package]. + +## Setup + +To run the samples using the published version of the package: + +1. Install the dependencies using `npm`: + +```bash +npm install +``` + +2. Edit the file `sample.env`, adding the correct credentials to access the Azure service and run the samples. Then rename the file from `sample.env` to just `.env`. The sample programs will read this file automatically. + +3. 
Run whichever samples you like (note that some samples may require additional setup, see the table above):
+
+```bash
+node lvaInvokeModuleSample.js
+```
+
+Alternatively, run a single sample with the correct environment variables set (setting up the `.env` file is not required if you do this), for example (cross-platform):
+
+```bash
+npx cross-env iothub_deviceprimarykey="<iothub deviceprimarykey>" iothub_deviceid="<iothub deviceid>" iothub_moduleid="<iothub moduleid>" iothub_connectionstring="<iothub connectionstring>" node lvaInvokeModuleSample.js
+```
+
+## Next Steps
+
+Take a look at our [API Documentation][apiref] for more information about the APIs that are available in the clients.
+
+[lvainvokemodulesample]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/lvaInvokeModuleSample.js
+[apiref]: https://docs.microsoft.com/javascript/api/@azure/video-analyzer-edge
+[freesub]: https://azure.microsoft.com/free/
+[createinstance_azureiothubaccount]: https://docs.microsoft.com/azure/iot-hub/iot-hub-create-through-portal
+[package]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/videoanalyzer/video-analyzer-edge/README.md
diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/javascript/lvaInvokeModule.js b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/lvaInvokeModuleSample.js
similarity index 59%
rename from sdk/videoanalyzer/video-analyzer-edge/samples/javascript/lvaInvokeModule.js
rename to sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/lvaInvokeModuleSample.js
index c5f9b26e3077..cca43fff6bb4 100644
--- a/sdk/videoanalyzer/video-analyzer-edge/samples/javascript/lvaInvokeModule.js
+++ b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/lvaInvokeModuleSample.js
@@ -1,6 +1,10 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
+/**
+ * @summary Demonstrates the use of an Azure Video Analyzer Edge SDK.
+ */ + const { createRequest } = require("@azure/video-analyzer-edge"); const { Client } = require("azure-iothub"); @@ -26,12 +30,13 @@ function buildPipelineTopology() { nodeName: "rtspSource" }; - const msgSink = { - //Create a sink for your pipeline topology - name: "msgSink", + const videoSink = { + name: "videoSink", inputs: [nodeInput], - hubOutputName: "${hubSinkOutputName}", - "@type": "#Microsoft.VideoAnalyzer.IotHubMessageSink" + videoName: "video", + localMediaCachePath: "/var/lib/videoanalyzer/tmp/", + localMediaCacheMaximumSizeMiB: "1024", + "@type": "#Microsoft.VideoAnalyzer.VideoSink" }; const pipelineTopology = { @@ -41,11 +46,10 @@ function buildPipelineTopology() { parameters: [ { name: "rtspUserName", type: "String", default: "testUsername" }, { name: "rtspPassword", type: "SecretString", default: "testPassword" }, - { name: "rtspUrl", type: "String" }, - { name: "hubSinkOutputName", type: "String" } + { name: "rtspUrl", type: "String" } ], sources: [rtspSource], - sinks: [msgSink] + sinks: [videoSink] } }; @@ -65,15 +69,35 @@ function buildLivePipeline(pipelineTopologyName) { return livePipeline; } +function createRemoteDeviceAdapter(deviceName, iotDeviceName) { + const remoteDeviceProperties = { + target: { host: "camerasimulator" }, + iotHubDeviceConnection: { + deviceId: iotDeviceName, + credentials: { + "@type": "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials", + key: process.env.iothub_deviceprimarykey + } + } + }; + + const remoteDeviceAdapter = { + name: deviceName, + properties: remoteDeviceProperties + }; + + return remoteDeviceAdapter; +} + async function main() { - const deviceId = "lva-sample-device"; - const moduleId = "mediaEdge"; - const connectionString = "connectionString"; - const iotHubClient = Client.fromConnectionString(connectionString); //Connect to your IoT Hub + const deviceId = process.env.iothub_deviceid; + const moduleId = process.env.iothub_moduleid; + const connectionString = process.env.iothub_connectionstring; + const iotHubClient = Client.fromConnectionString(connectionString ?? ""); //Connect to your IoT Hub async function invokeMethodHelper(methodRequest) { //Helper method to send a module method request to your IoT Hub device - return await iotHubClient.invokeDeviceMethod(deviceId, moduleId, { + return await iotHubClient.invokeDeviceMethod(deviceId ?? "", moduleId ?? 
"", { methodName: methodRequest.methodName, payload: methodRequest.payload }); @@ -121,6 +145,50 @@ async function main() { const deletePipelineTopRequest = createRequest("pipelineTopologyDelete", pipelineTopology.name); const deletePipelineTopResponse = await invokeMethodHelper(deletePipelineTopRequest); console.log(deletePipelineTopResponse); + + const endpoint = { + url: "http://camerasimulator:8554", + "@type": "#Microsoft.VideoAnalyzer.UnsecuredEndpoint" + }; + const getOnvifDeviceRequest = createRequest("onvifDeviceGet", endpoint); + const getOnvifDeviceResponse = await invokeMethodHelper(getOnvifDeviceRequest); + console.log(getOnvifDeviceResponse); + + const listOnvifDeviceRequest = createRequest("onvifDeviceDiscover"); + const listOnvifDeviceResponse = await invokeMethodHelper(listOnvifDeviceRequest); + console.log(listOnvifDeviceResponse); + + const remoteDeviceAdapter = await createRemoteDeviceAdapter( + "remoteDeviceAdapterSample", + "iotDeviceNameSample" + ); + console.log(remoteDeviceAdapter); + const setRemoteDeviceAdapterRequest = createRequest( + "remoteDeviceAdapterSet", + remoteDeviceAdapter + ); + const setRemoteDeviceAdapterResponse = await invokeMethodHelper(setRemoteDeviceAdapterRequest); + console.log(setRemoteDeviceAdapterResponse); + + const getRemoteDeviceAdapterRequest = createRequest( + "remoteDeviceAdapterGet", + remoteDeviceAdapter.name + ); + const getRemoteDeviceAdapterResponse = await invokeMethodHelper(getRemoteDeviceAdapterRequest); + console.log(getRemoteDeviceAdapterResponse); + + const listRemoteDeviceAdapterRequest = createRequest("remoteDeviceAdapterList"); + const listRemoteDeviceAdapterResponse = await invokeMethodHelper(listRemoteDeviceAdapterRequest); + console.log(listRemoteDeviceAdapterResponse); + + const deleteRemoteDeviceAdapterRequest = createRequest( + "remoteDeviceAdapterDelete", + remoteDeviceAdapter.name + ); + const deleteRemoteDeviceAdapterResponse = await invokeMethodHelper( + deleteRemoteDeviceAdapterRequest + ); + console.log(deleteRemoteDeviceAdapterResponse); } main().catch((err) => { diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/javascript/package.json b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/package.json similarity index 56% rename from sdk/videoanalyzer/video-analyzer-edge/samples/javascript/package.json rename to sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/package.json index 682d05a795d1..a9bb91a344a9 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/samples/javascript/package.json +++ b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/package.json @@ -1,9 +1,9 @@ { - "name": "azure-lva-invoke-modules-sample-js", + "name": "azure-video-analyzer-edge-samples-js", "private": true, - "version": "0.1.0", - "description": "Sample code demonstrating how to invoke modules on lva pipelines in Azure IoT hub", - "engine": { + "version": "1.0.0", + "description": "Azure Video Analyzer client library samples for JavaScript", + "engines": { "node": ">=12.0.0" }, "repository": { @@ -12,19 +12,19 @@ "directory": "sdk/videoanalyzer/video-analyzer-edge" }, "keywords": [ - "Azure", - "Node.js", - "JavaScript" + "azure", + "cloud", + "typescript" ], "author": "Microsoft Corporation", "license": "MIT", "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, - "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/mediaservices", - "sideEffects": false, + "homepage": 
"https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/videoanalyzer/video-analyzer-edge", "dependencies": { - "@azure/template": "latest", - "dotenv": "^8.2.0" + "@azure/video-analyzer-edge": "next", + "dotenv": "latest", + "azure-iothub": "^1.14.6" } } diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/javascript/sample.env b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/sample.env similarity index 70% rename from sdk/videoanalyzer/video-analyzer-edge/samples/javascript/sample.env rename to sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/sample.env index bec0f536a952..c231c9804b0b 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/samples/javascript/sample.env +++ b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/javascript/sample.env @@ -1,7 +1,7 @@ # Used in most samples. Retrieve these values from a Cognitive Services instance # in the Azure Portal. -ENDPOINT="" -API_KEY="" +ENDPOINT="Your Endpoint URL" +API_KEY="Your API key" # Used to authenticate using Azure AD as a service principal for role-based authentication # in the tokenAuth sample. @@ -11,3 +11,7 @@ API_KEY="" AZURE_TENANT_ID= AZURE_CLIENT_ID= AZURE_CLIENT_SECRET= + +# Our tests assume that TEST_MODE is "playback" by default. You can +# change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. +# TEST_MODE=playback \ No newline at end of file diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/README.md b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/README.md new file mode 100644 index 000000000000..ee8d7cf87581 --- /dev/null +++ b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/README.md @@ -0,0 +1,75 @@ +--- +page_type: sample +languages: + - typescript +products: + - azure +urlFragment: video-analyzer-edge-typescript +--- + +# Azure Video Analyzer client library samples for TypeScript + +These sample programs show how to use the TypeScript client libraries for Azure Video Analyzer in some common scenarios. + +| **File Name** | **Description** | +| ------------------------------------------------- | -------------------------------------------------------- | +| [lvaInvokeModuleSample.ts][lvainvokemodulesample] | Demonstrates the use of a Azure Video Analyzer Edge sdk. | + +## Prerequisites + +The sample programs are compatible with [LTS versions of Node.js](https://nodejs.org/about/releases/). + +Before running the samples in Node, they must be compiled to JavaScript using the TypeScript compiler. For more information on TypeScript, see the [TypeScript documentation][typescript]. Install the TypeScript compiler using: + +```bash +npm install -g typescript +``` + +You need [an Azure subscription][freesub] and the following Azure resources to run these sample programs: + +- [Azure IoT Hub account][createinstance_azureiothubaccount] + +Samples retrieve credentials to access the service endpoint from environment variables. Alternatively, edit the source code to include the appropriate credentials. See each individual sample for details on which environment variables/credentials it requires to function. + +Adapting the samples to run in the browser may require some additional consideration. For details, please see the [package README][package]. + +## Setup + +To run the samples using the published version of the package: + +1. Install the dependencies using `npm`: + +```bash +npm install +``` + +2. Compile the samples: + +```bash +npm run build +``` + +3. 
Edit the file `sample.env`, adding the correct credentials to access the Azure service and run the samples. Then rename the file from `sample.env` to just `.env`. The sample programs will read this file automatically.
+
+4. Run whichever samples you like (note that some samples may require additional setup, see the table above):
+
+```bash
+node dist/lvaInvokeModuleSample.js
+```
+
+Alternatively, run a single sample with the correct environment variables set (setting up the `.env` file is not required if you do this), for example (cross-platform):
+
+```bash
+npx cross-env iothub_deviceprimarykey="<iothub deviceprimarykey>" iothub_deviceid="<iothub deviceid>" iothub_moduleid="<iothub moduleid>" iothub_connectionstring="<iothub connectionstring>" node dist/lvaInvokeModuleSample.js
+```
+
+## Next Steps
+
+Take a look at our [API Documentation][apiref] for more information about the APIs that are available in the clients.
+
+[lvainvokemodulesample]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/src/lvaInvokeModuleSample.ts
+[apiref]: https://docs.microsoft.com/javascript/api/@azure/video-analyzer-edge
+[freesub]: https://azure.microsoft.com/free/
+[createinstance_azureiothubaccount]: https://docs.microsoft.com/azure/iot-hub/iot-hub-create-through-portal
+[package]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/videoanalyzer/video-analyzer-edge/README.md
+[typescript]: https://www.typescriptlang.org/docs/home.html
diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/package.json b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/package.json
new file mode 100644
index 000000000000..5b5214269892
--- /dev/null
+++ b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/package.json
@@ -0,0 +1,38 @@
+{
+  "name": "azure-video-analyzer-edge-samples-ts",
+  "private": true,
+  "version": "1.0.0",
+  "description": "Azure Video Analyzer client library samples for TypeScript",
+  "engines": {
+    "node": ">=12.0.0"
+  },
+  "scripts": {
+    "build": "tsc",
+    "prebuild": "rimraf dist/"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/Azure/azure-sdk-for-js.git",
+    "directory": "sdk/videoanalyzer/video-analyzer-edge"
+  },
+  "keywords": [
+    "azure",
+    "cloud",
+    "typescript"
+  ],
+  "author": "Microsoft Corporation",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/Azure/azure-sdk-for-js/issues"
+  },
+  "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/videoanalyzer/video-analyzer-edge",
+  "dependencies": {
+    "@azure/video-analyzer-edge": "next",
+    "dotenv": "latest",
+    "azure-iothub": "^1.14.6"
+  },
+  "devDependencies": {
+    "typescript": "~4.4.0",
+    "rimraf": "latest"
+  }
+}
diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/sample.env b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/sample.env
new file mode 100644
index 000000000000..c231c9804b0b
--- /dev/null
+++ b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/sample.env
@@ -0,0 +1,17 @@
+# Used in most samples. Retrieve these values from a Cognitive Services instance
+# in the Azure Portal.
+ENDPOINT="Your Endpoint URL"
+API_KEY="Your API key"
+
+# Used to authenticate using Azure AD as a service principal for role-based authentication
+# in the tokenAuth sample.
+# +# See the documentation for `EnvironmentCredential` at the following link: +# https://docs.microsoft.com/javascript/api/@azure/identity/environmentcredential +AZURE_TENANT_ID= +AZURE_CLIENT_ID= +AZURE_CLIENT_SECRET= + +# Our tests assume that TEST_MODE is "playback" by default. You can +# change it to "record" to generate new recordings, or "live" to bypass the recorder entirely. +# TEST_MODE=playback \ No newline at end of file diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/src/lvaInvokeModuleSample.ts b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/src/lvaInvokeModuleSample.ts new file mode 100644 index 000000000000..99cc76a05702 --- /dev/null +++ b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/src/lvaInvokeModuleSample.ts @@ -0,0 +1,207 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +/** + * @summary Demonstrates the use of a Azure Video Analyzer Edge sdk. + */ + +import { + PipelineTopology, + RtspSource, + UnsecuredEndpoint, + NodeInput, + LivePipeline, + Request, + createRequest, + RemoteDeviceAdapterProperties, + RemoteDeviceAdapter, + VideoSink +} from "@azure/video-analyzer-edge"; + +import { Client } from "azure-iothub"; + +function buildPipelineTopology() { + const rtspSource: RtspSource = { + //Create a source for your pipeline topology + name: "rtspSource", + endpoint: { + url: "${rtspUrl}", + "@type": "#Microsoft.VideoAnalyzer.UnsecuredEndpoint", + credentials: { + username: "${rtspUserName}", + password: "${rtspPassword}", + "@type": "#Microsoft.VideoAnalyzer.UsernamePasswordCredentials" + } + } as UnsecuredEndpoint, + "@type": "#Microsoft.VideoAnalyzer.RtspSource" + }; + + const nodeInput: NodeInput = { + //Create an input for your sink + nodeName: "rtspSource" + }; + + const videoSink: VideoSink = { + name: "videoSink", + inputs: [nodeInput], + videoName: "video", + localMediaCachePath: "/var/lib/videoanalyzer/tmp/", + localMediaCacheMaximumSizeMiB: "1024", + "@type": "#Microsoft.VideoAnalyzer.VideoSink" + }; + + const pipelineTopology: PipelineTopology = { + name: "jsTestTopology", + properties: { + description: "description for jsTestTopology", + parameters: [ + { name: "rtspUserName", type: "String", default: "testUsername" }, + { name: "rtspPassword", type: "SecretString", default: "testPassword" }, + { name: "rtspUrl", type: "String" } + ], + sources: [rtspSource], + sinks: [videoSink] + } + }; + + return pipelineTopology; +} + +function buildLivePipeline(pipelineTopologyName: string) { + const livePipeline: LivePipeline = { + name: "jsLivePipelineTest", + properties: { + description: "description", + topologyName: pipelineTopologyName, + parameters: [{ name: "rtspUrl", value: "rtsp://sample.com" }] + } + }; + + return livePipeline; +} + +function createRemoteDeviceAdapter(deviceName: string, iotDeviceName: string): RemoteDeviceAdapter { + const remoteDeviceProperties: RemoteDeviceAdapterProperties = { + target: { host: "camerasimulator" }, + iotHubDeviceConnection: { + deviceId: iotDeviceName, + credentials: { + "@type": "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials", + key: process.env.iothub_deviceprimarykey + } + } + }; + + const remoteDeviceAdapter: RemoteDeviceAdapter = { + name: deviceName, + properties: remoteDeviceProperties + }; + + return remoteDeviceAdapter; +} + +export async function main() { + const deviceId = process.env.iothub_deviceid; + const moduleId = process.env.iothub_moduleid; + const connectionString = process.env.iothub_connectionstring; + const 
+
+  async function invokeMethodHelper(methodRequest: Request) {
+    // Helper method to send a module method request to your IoT Hub device
+    return await iotHubClient.invokeDeviceMethod(deviceId ?? "", moduleId ?? "", {
+      methodName: methodRequest.methodName,
+      payload: methodRequest.payload
+    });
+  }
+
+  const pipelineTopology = buildPipelineTopology();
+  const livePipeline = buildLivePipeline(pipelineTopology.name);
+
+  const pipelineTopologySetRequest = createRequest("pipelineTopologySet", pipelineTopology);
+  const setPipelineTopResponse = await invokeMethodHelper(pipelineTopologySetRequest);
+  console.log(setPipelineTopResponse);
+
+  const listPipelineTopologyRequest = createRequest("pipelineTopologyList");
+  const listPipelineTopologyResponse = await invokeMethodHelper(listPipelineTopologyRequest);
+  console.log(listPipelineTopologyResponse);
+
+  const getPipelineTopologyRequest = createRequest("pipelineTopologyGet", pipelineTopology.name);
+  const getPipelineTopologyResponse = await invokeMethodHelper(getPipelineTopologyRequest);
+  console.log(getPipelineTopologyResponse);
+
+  const setLivePipelineRequest = createRequest("livePipelineSet", livePipeline);
+  const setLivePipelineResponse = await invokeMethodHelper(setLivePipelineRequest);
+  console.log(setLivePipelineResponse);
+
+  const listLivePipelineRequest = createRequest("livePipelineList");
+  const listLivePipelineResponse = await invokeMethodHelper(listLivePipelineRequest);
+  console.log(listLivePipelineResponse);
+
+  const activateLivePipelineRequest = createRequest("livePipelineActivate", livePipeline.name);
+  const activateLivePipelineResponse = await invokeMethodHelper(activateLivePipelineRequest);
+  console.log(activateLivePipelineResponse);
+
+  const getLivePipelineRequest = createRequest("livePipelineGet", livePipeline.name);
+  const getLivePipelineResponse = await invokeMethodHelper(getLivePipelineRequest);
+  console.log(getLivePipelineResponse);
+
+  const deactivateLivePipelineRequest = createRequest("livePipelineDeactivate", livePipeline.name);
+  const deactivateLivePipelineResponse = await invokeMethodHelper(deactivateLivePipelineRequest);
+  console.log(deactivateLivePipelineResponse);
+
+  const deleteLivePipelineRequest = createRequest("livePipelineDelete", livePipeline.name);
+  const deleteLivePipelineResponse = await invokeMethodHelper(deleteLivePipelineRequest);
+  console.log(deleteLivePipelineResponse);
+
+  const deletePipelineTopRequest = createRequest("pipelineTopologyDelete", pipelineTopology.name);
+  const deletePipelineTopResponse = await invokeMethodHelper(deletePipelineTopRequest);
+  console.log(deletePipelineTopResponse);
+
+  const endpoint: UnsecuredEndpoint = {
+    url: "http://camerasimulator:8554",
+    "@type": "#Microsoft.VideoAnalyzer.UnsecuredEndpoint"
+  };
+  const getOnvifDeviceRequest = createRequest("onvifDeviceGet", endpoint);
+  const getOnvifDeviceResponse = await invokeMethodHelper(getOnvifDeviceRequest);
+  console.log(getOnvifDeviceResponse);
+
+  const listOnvifDeviceRequest = createRequest("onvifDeviceDiscover");
+  const listOnvifDeviceResponse = await invokeMethodHelper(listOnvifDeviceRequest);
+  console.log(listOnvifDeviceResponse);
+
+  const remoteDeviceAdapter = createRemoteDeviceAdapter(
+    "remoteDeviceAdapterSample",
+    "iotDeviceNameSample"
+  );
+  console.log(remoteDeviceAdapter);
+  const setRemoteDeviceAdapterRequest = createRequest(
+    "remoteDeviceAdapterSet",
+    remoteDeviceAdapter
+  );
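+  // "remoteDeviceAdapterSet" registers the adapter with the module; the
+  // get/list/delete requests below manage it over the same direct-method channel.
+  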
const setRemoteDeviceAdapterResponse = await invokeMethodHelper(setRemoteDeviceAdapterRequest); + console.log(setRemoteDeviceAdapterResponse); + + const getRemoteDeviceAdapterRequest = createRequest( + "remoteDeviceAdapterGet", + remoteDeviceAdapter.name + ); + const getRemoteDeviceAdapterResponse = await invokeMethodHelper(getRemoteDeviceAdapterRequest); + console.log(getRemoteDeviceAdapterResponse); + + const listRemoteDeviceAdapterRequest = createRequest("remoteDeviceAdapterList"); + const listRemoteDeviceAdapterResponse = await invokeMethodHelper(listRemoteDeviceAdapterRequest); + console.log(listRemoteDeviceAdapterResponse); + + const deleteRemoteDeviceAdapterRequest = createRequest( + "remoteDeviceAdapterDelete", + remoteDeviceAdapter.name + ); + const deleteRemoteDeviceAdapterResponse = await invokeMethodHelper( + deleteRemoteDeviceAdapterRequest + ); + console.log(deleteRemoteDeviceAdapterResponse); +} + +main().catch((err) => { + console.error("The sample encountered an error:", err); +}); diff --git a/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/tsconfig.json b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/tsconfig.json similarity index 65% rename from sdk/videoanalyzer/video-analyzer-edge/samples/typescript/tsconfig.json rename to sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/tsconfig.json index 5ed056486b40..416c2dd82e00 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/samples/typescript/tsconfig.json +++ b/sdk/videoanalyzer/video-analyzer-edge/samples/v1/typescript/tsconfig.json @@ -1,16 +1,17 @@ { "compilerOptions": { + "target": "ES2018", "module": "commonjs", "moduleResolution": "node", - + "resolveJsonModule": true, + "esModuleInterop": true, "allowSyntheticDefaultImports": true, - "strict": true, "alwaysStrict": true, - "outDir": "dist", "rootDir": "src" }, - "include": ["src/**.ts"], - "exclude": ["node_modules"] + "include": [ + "src/**.ts" + ] } diff --git a/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/index.ts b/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/index.ts index 86374a243f2b..62a8eefe78e1 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/index.ts +++ b/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/index.ts @@ -6,7 +6,7 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import * as coreHttp from "@azure/core-http"; +import * as coreClient from "@azure/core-client"; export type SourceNodeBaseUnion = | SourceNodeBase @@ -29,7 +29,8 @@ export type EndpointBaseUnion = EndpointBase | UnsecuredEndpoint | TlsEndpoint; export type CredentialsBaseUnion = | CredentialsBase | UsernamePasswordCredentials - | HttpHeaderCredentials; + | HttpHeaderCredentials + | SymmetricKeyCredentials; export type CertificateSourceUnion = CertificateSource | PemCertificateList; export type NamedLineBaseUnion = NamedLineBase | NamedLineString; export type ImageFormatPropertiesUnion = @@ -227,7 +228,8 @@ export interface CredentialsBase { /** Polymorphic discriminator, which specifies the different types this object can be */ "@type": | "#Microsoft.VideoAnalyzer.UsernamePasswordCredentials" - | "#Microsoft.VideoAnalyzer.HttpHeaderCredentials"; + | "#Microsoft.VideoAnalyzer.HttpHeaderCredentials" + | "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials"; } /** Base class for certificate sources. 
*/ @@ -244,6 +246,12 @@ export interface TlsValidationOptions { ignoreSignature?: string; } +/** Options for changing video publishing behavior on the video sink and output video. */ +export interface VideoPublishingOptions { + /** When set to 'true' the video will publish preview images. Default is 'false'. */ + enableVideoPreviewImage?: string; +} + /** Optional video properties to be used in case a new video resource needs to be created on the service. These will not take effect if the video already exists. */ export interface VideoCreationProperties { /** Optional video title provided by the user. Value can be up to 256 characters long. */ @@ -252,6 +260,8 @@ export interface VideoCreationProperties { description?: string; /** Video segment length indicates the length of individual video files (segments) which are persisted to storage. Smaller segments provide lower archive playback latency but generate larger volume of storage transactions. Larger segments reduce the amount of storage transactions while increasing the archive playback latency. Value must be specified in ISO8601 duration format (i.e. "PT30S" equals 30 seconds) and can vary between 30 seconds to 5 minutes, in 30 seconds increments. Changing this value after the video is initially created can lead to errors when uploading media to the archive. Default value is 30 seconds. */ segmentLength?: string; + /** Video retention period indicates how long the video is kept in storage, and must be a multiple of 1 day. For example, if this is set to 30 days, then content older than 30 days will be deleted. */ + retentionPeriod?: string; } /** Base class for named lines. */ @@ -362,12 +372,180 @@ export interface SpatialAnalysisPersonLineCrossingLineEvents { events?: SpatialAnalysisPersonLineCrossingEvent[]; } +/** The Video Analyzer edge module can act as a transparent gateway for video, enabling IoT devices to send video to the cloud from behind a firewall. A remote device adapter should be created for each such IoT device. Communication between the cloud and IoT device would then flow via the Video Analyzer edge module. */ +export interface RemoteDeviceAdapter { + /** The unique identifier for the remote device adapter. */ + name: string; + /** Read-only system metadata associated with this object. */ + systemData?: SystemData; + /** Properties of the remote device adapter. */ + properties?: RemoteDeviceAdapterProperties; +} + +/** Remote device adapter properties. */ +export interface RemoteDeviceAdapterProperties { + /** An optional description for the remote device adapter. */ + description?: string; + /** The IoT device to which this remote device will connect. */ + target: RemoteDeviceAdapterTarget; + /** Information that enables communication between the IoT Hub and the IoT device - allowing this edge module to act as a transparent gateway between the two. */ + iotHubDeviceConnection: IotHubDeviceConnection; +} + +/** Properties of the remote device adapter target. */ +export interface RemoteDeviceAdapterTarget { + /** Hostname or IP address of the remote device. */ + host: string; +} + +/** Information that enables communication between the IoT Hub and the IoT device - allowing this edge module to act as a transparent gateway between the two. */ +export interface IotHubDeviceConnection { + /** The name of the IoT device configured and managed in IoT Hub. (case-sensitive) */ + deviceId: string; + /** IoT device connection credentials. Currently IoT device symmetric key credentials are supported. 
*/
+  credentials?: CredentialsBaseUnion;
+}
+
+/** A list of remote device adapters. */
+export interface RemoteDeviceAdapterCollection {
+  /** An array of remote device adapters. */
+  value?: RemoteDeviceAdapter[];
+  /** A continuation token to use in subsequent calls to enumerate through the remote device adapter collection. This is used when the collection contains too many results to return in one response. */
+  continuationToken?: string;
+}
+
+/** A list of ONVIF devices that were discovered in the same subnet as the IoT Edge device. */
+export interface DiscoveredOnvifDeviceCollection {
+  /** An array of ONVIF devices that have been discovered in the same subnet as the IoT Edge device. */
+  value?: DiscoveredOnvifDevice[];
+}
+
+/** The discovered properties of the ONVIF device that are returned during the discovery. */
+export interface DiscoveredOnvifDevice {
+  /** The unique identifier of the ONVIF device that was discovered in the same subnet as the IoT Edge device. */
+  serviceIdentifier?: string;
+  /** The IP address of the ONVIF device that was discovered in the same subnet as the IoT Edge device. */
+  remoteIPAddress?: string;
+  /** An array of hostnames for the ONVIF discovered devices that are in the same subnet as the IoT Edge device. */
+  scopes?: string[];
+  /** An array of media profile endpoints that the ONVIF discovered device supports. */
+  endpoints?: string[];
+}
+
+/** The ONVIF device properties. */
+export interface OnvifDevice {
+  /** The hostname of the ONVIF device. */
+  hostname?: OnvifHostName;
+  /** The system date and time of the ONVIF device. */
+  systemDateTime?: OnvifSystemDateTime;
+  /** The ONVIF device DNS properties. */
+  dns?: OnvifDns;
+  /** An array of ONVIF media profiles supported by the ONVIF device. */
+  mediaProfiles?: MediaProfile[];
+}
+
+/** The ONVIF device hostname properties. */
+export interface OnvifHostName {
+  /** Result value showing if the ONVIF device is configured to use DHCP. */
+  fromDhcp?: boolean;
+  /** The hostname of the ONVIF device. */
+  hostname?: string;
+}
+
+/** The ONVIF device system date and time properties. */
+export interface OnvifSystemDateTime {
+  /** An enum value determining whether the date time was configured using NTP or manually. */
+  type?: OnvifSystemDateTimeType;
+  /** The device datetime returned when calling the request. */
+  time?: string;
+  /** The timezone of the ONVIF device datetime. */
+  timeZone?: string;
+}
+
+/** The ONVIF device DNS properties. */
+export interface OnvifDns {
+  /** Result value showing if the ONVIF device is configured to use DHCP. */
+  fromDhcp?: boolean;
+  /** An array of IPv4 addresses for the discovered ONVIF device. */
+  ipv4Address?: string[];
+  /** An array of IPv6 addresses for the discovered ONVIF device. */
+  ipv6Address?: string[];
+}
+
+/** Class representing the ONVIF MediaProfiles. */
+export interface MediaProfile {
+  /** The name of the Media Profile. */
+  name?: string;
+  /** Object representing the URI that will be used to request media streaming. */
+  mediaUri?: Record<string, unknown>;
+  /** The Video encoder configuration. */
+  videoEncoderConfiguration?: VideoEncoderConfiguration;
+}
+
+/** Class representing the video encoder configuration. */
+export interface VideoEncoderConfiguration {
+  /** The video codec used by the Media Profile. */
+  encoding?: VideoEncoding;
+  /** Relative value representing the quality of the video. */
+  quality?: number;
+  /** The Video Resolution. */
+  resolution?: VideoResolution;
+  /** The Video's rate control. */
+  rateControl?: RateControl;
+  /** The H264 Configuration. */
+  h264?: H264Configuration;
+  /** The MPEG4 Configuration. */
+  mpeg4?: Mpeg4Configuration;
+}
+
+/** The Video resolution. */
+export interface VideoResolution {
+  /** The number of columns of the Video image. */
+  width?: number;
+  /** The number of lines of the Video image. */
+  height?: number;
+}
+
+/** Class representing the video's rate control. */
+export interface RateControl {
+  /** The maximum output bitrate in kbps. */
+  bitRateLimit?: number;
+  /** Interval at which images are encoded and transmitted. */
+  encodingInterval?: number;
+  /** Maximum output framerate in fps. */
+  frameRateLimit?: number;
+  /** A value of true indicates that frame rate is a fixed value rather than an upper limit, and that the video encoder shall prioritize frame rate over all other adaptable configuration values such as bitrate. */
+  guaranteedFrameRate?: boolean;
+}
+
+/** Class representing the H264 Configuration. */
+export interface H264Configuration {
+  /** Group of Video frames length. */
+  govLength?: number;
+  /** The H264 Profile. */
+  profile?: H264Profile;
+}
+
+/** Class representing the MPEG4 Configuration. */
+export interface Mpeg4Configuration {
+  /** Group of Video frames length. */
+  govLength?: number;
+  /** The MPEG4 Profile. */
+  profile?: Mpeg4Profile;
+}
+
+/** Object representing the URI that will be used to request media streaming. */
+export interface MediaUri {
+  /** URI that can be used for media streaming. */
+  uri?: string;
+}
+
 /** Base class for direct method calls. */
 export interface MethodRequest {
   /** Polymorphic discriminator, which specifies the different types this object can be */
   methodName: "undefined";
   /** Video Analyzer API version. */
-  apiVersion?: "1.0";
+  apiVersion?: "1.1";
 }
 
 /** RTSP source allows for media from an RTSP camera or generic RTSP server to be ingested into a live pipeline. */
@@ -419,7 +597,10 @@ export type LineCrossingProcessor = ProcessorNodeBase & {
 /** Base class for pipeline extension processors. Pipeline extensions allow for custom media analysis and processing to be plugged into the Video Analyzer pipeline. */
 export type ExtensionProcessorBase = ProcessorNodeBase & {
   /** Polymorphic discriminator, which specifies the different types this object can be */
-  "@type": "#Microsoft.VideoAnalyzer.ExtensionProcessorBase";
+  "@type":
+    | "#Microsoft.VideoAnalyzer.ExtensionProcessorBase"
+    | "#Microsoft.VideoAnalyzer.GrpcExtension"
+    | "#Microsoft.VideoAnalyzer.HttpExtension";
   /** Endpoint details of the pipeline extension plugin. */
   endpoint: EndpointBaseUnion;
   /** Image transformations and formatting options to be applied to the video frame(s) prior to submission to the pipeline extension plugin. */
@@ -484,6 +665,8 @@ export type VideoSink = SinkNodeBase & {
   videoName: string;
   /** Optional video properties to be used in case a new video resource needs to be created on the service. */
   videoCreationProperties?: VideoCreationProperties;
+  /** Optional video publishing options to be used for changing publishing behavior of the output video. */
+  videoPublishingOptions?: VideoPublishingOptions;
   /** Path to a local file system directory for caching of temporary media files. This will also be used to store content which cannot be immediately uploaded to Azure due to Internet connectivity issues. */
   localMediaCachePath: string;
   /** Maximum amount of disk space that can be used for caching of temporary media files.
Once this limit is reached, the oldest segments of the media archive will be continuously deleted in order to make space for new media, thus leading to gaps in the cloud recorded content. */ @@ -526,6 +709,14 @@ export type HttpHeaderCredentials = CredentialsBase & { headerValue: string; }; +/** Symmetric key credential. */ +export type SymmetricKeyCredentials = CredentialsBase & { + /** Polymorphic discriminator, which specifies the different types this object can be */ + "@type": "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials"; + /** Symmetric key credential. */ + key: string; +}; + /** A list of PEM formatted certificates. */ export type PemCertificateList = CertificateSource & { /** Polymorphic discriminator, which specifies the different types this object can be */ @@ -589,13 +780,24 @@ export type SpatialAnalysisCustomOperation = SpatialAnalysisOperationBase & { /** Base class for Azure Cognitive Services Spatial Analysis typed operations. */ export type SpatialAnalysisTypedOperationBase = SpatialAnalysisOperationBase & { /** Polymorphic discriminator, which specifies the different types this object can be */ - "@type": "SpatialAnalysisTypedOperationBase"; + "@type": + | "SpatialAnalysisTypedOperationBase" + | "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonCountOperation" + | "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonZoneCrossingOperation" + | "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonDistanceOperation" + | "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonLineCrossingOperation"; /** If set to 'true', enables debugging mode for this operation. */ debug?: string; + /** Advanced calibration configuration. */ + calibrationConfiguration?: string; /** Advanced camera configuration. */ cameraConfiguration?: string; + /** Advanced camera calibrator configuration. */ + cameraCalibratorNodeConfiguration?: string; /** Advanced detector node configuration. */ detectorNodeConfiguration?: string; + /** Advanced tracker node configuration. */ + trackerNodeConfiguration?: string; /** If set to 'true', enables face mask detection for this operation. */ enableFaceMaskClassifier?: string; }; @@ -678,7 +880,7 @@ export type SpatialAnalysisPersonLineCrossingOperation = SpatialAnalysisTypedOpe }; /** Known values of {@link LivePipelineState} that the service accepts. */ -export const enum KnownLivePipelineState { +export enum KnownLivePipelineState { /** The live pipeline is idle and not processing media. */ Inactive = "inactive", /** The live pipeline is transitioning into the active state. */ @@ -693,7 +895,7 @@ export const enum KnownLivePipelineState { * Defines values for LivePipelineState. \ * {@link KnownLivePipelineState} can be used interchangeably with LivePipelineState, * this enum contains the known values that the service supports. - * ### Know values supported by the service + * ### Known values supported by the service * **inactive**: The live pipeline is idle and not processing media. \ * **activating**: The live pipeline is transitioning into the active state. \ * **active**: The live pipeline is active and able to process media. If your data source is not available, for instance, if your RTSP camera is powered off or unreachable, the pipeline will still be active and periodically retrying the connection. Your Azure subscription will be billed for the duration in which the live pipeline is in the active state. 
\ @@ -702,7 +904,7 @@ export const enum KnownLivePipelineState { export type LivePipelineState = string; /** Known values of {@link ParameterType} that the service accepts. */ -export const enum KnownParameterType { +export enum KnownParameterType { /** The parameter's value is a string. */ String = "string", /** The parameter's value is a string that holds sensitive information. */ @@ -719,7 +921,7 @@ export const enum KnownParameterType { * Defines values for ParameterType. \ * {@link KnownParameterType} can be used interchangeably with ParameterType, * this enum contains the known values that the service supports. - * ### Know values supported by the service + * ### Known values supported by the service * **string**: The parameter's value is a string. \ * **secretString**: The parameter's value is a string that holds sensitive information. \ * **int**: The parameter's value is a 32-bit signed integer. \ @@ -729,7 +931,7 @@ export const enum KnownParameterType { export type ParameterType = string; /** Known values of {@link OutputSelectorProperty} that the service accepts. */ -export const enum KnownOutputSelectorProperty { +export enum KnownOutputSelectorProperty { /** The stream's MIME type or subtype: audio, video or application */ MediaType = "mediaType" } @@ -738,13 +940,13 @@ export const enum KnownOutputSelectorProperty { * Defines values for OutputSelectorProperty. \ * {@link KnownOutputSelectorProperty} can be used interchangeably with OutputSelectorProperty, * this enum contains the known values that the service supports. - * ### Know values supported by the service + * ### Known values supported by the service * **mediaType**: The stream's MIME type or subtype: audio, video or application */ export type OutputSelectorProperty = string; /** Known values of {@link OutputSelectorOperator} that the service accepts. */ -export const enum KnownOutputSelectorOperator { +export enum KnownOutputSelectorOperator { /** The property is of the type defined by value. */ Is = "is", /** The property is not of the type defined by value. */ @@ -755,14 +957,14 @@ export const enum KnownOutputSelectorOperator { * Defines values for OutputSelectorOperator. \ * {@link KnownOutputSelectorOperator} can be used interchangeably with OutputSelectorOperator, * this enum contains the known values that the service supports. - * ### Know values supported by the service + * ### Known values supported by the service * **is**: The property is of the type defined by value. \ * **isNot**: The property is not of the type defined by value. */ export type OutputSelectorOperator = string; /** Known values of {@link RtspTransport} that the service accepts. */ -export const enum KnownRtspTransport { +export enum KnownRtspTransport { /** HTTP transport. RTSP messages are exchanged over long running HTTP requests and RTP packets are interleaved within the HTTP channel. */ Http = "http", /** TCP transport. RTSP is used directly over TCP and RTP packets are interleaved within the TCP channel. */ @@ -773,14 +975,14 @@ export const enum KnownRtspTransport { * Defines values for RtspTransport. \ * {@link KnownRtspTransport} can be used interchangeably with RtspTransport, * this enum contains the known values that the service supports. - * ### Know values supported by the service + * ### Known values supported by the service * **http**: HTTP transport. RTSP messages are exchanged over long running HTTP requests and RTP packets are interleaved within the HTTP channel. \ * **tcp**: TCP transport. 
RTSP is used directly over TCP and RTP packets are interleaved within the TCP channel.
  */
 export type RtspTransport = string;
 
 /** Known values of {@link MotionDetectionSensitivity} that the service accepts. */
-export const enum KnownMotionDetectionSensitivity {
+export enum KnownMotionDetectionSensitivity {
   /** Low sensitivity. */
   Low = "low",
   /** Medium sensitivity. */
@@ -793,7 +995,7 @@ export const enum KnownMotionDetectionSensitivity {
  * Defines values for MotionDetectionSensitivity. \
  * {@link KnownMotionDetectionSensitivity} can be used interchangeably with MotionDetectionSensitivity,
  * this enum contains the known values that the service supports.
- * ### Know values supported by the service
+ * ### Known values supported by the service
  * **low**: Low sensitivity. \
  * **medium**: Medium sensitivity. \
  * **high**: High sensitivity.
@@ -801,7 +1003,7 @@ export const enum KnownMotionDetectionSensitivity {
  */
 export type MotionDetectionSensitivity = string;
 
 /** Known values of {@link ObjectTrackingAccuracy} that the service accepts. */
-export const enum KnownObjectTrackingAccuracy {
+export enum KnownObjectTrackingAccuracy {
   /** Low accuracy. */
   Low = "low",
   /** Medium accuracy. */
@@ -814,7 +1016,7 @@ export const enum KnownObjectTrackingAccuracy {
  * Defines values for ObjectTrackingAccuracy. \
  * {@link KnownObjectTrackingAccuracy} can be used interchangeably with ObjectTrackingAccuracy,
  * this enum contains the known values that the service supports.
- * ### Know values supported by the service
+ * ### Known values supported by the service
  * **low**: Low accuracy. \
  * **medium**: Medium accuracy. \
  * **high**: High accuracy.
@@ -822,7 +1024,7 @@ export const enum KnownObjectTrackingAccuracy {
  */
 export type ObjectTrackingAccuracy = string;
 
 /** Known values of {@link ImageScaleMode} that the service accepts. */
-export const enum KnownImageScaleMode {
+export enum KnownImageScaleMode {
   /** Preserves the same aspect ratio as the input image. If only one image dimension is provided, the second dimension is calculated based on the input image aspect ratio. When 2 dimensions are provided, the image is resized to fit the most constraining dimension, considering the input image size and aspect ratio. */
   PreserveAspectRatio = "preserveAspectRatio",
   /** Pads the image with black horizontal stripes (letterbox) or black vertical stripes (pillar-box) so the image is resized to the specified dimensions while not altering the content aspect ratio. */
@@ -835,7 +1037,7 @@ export const enum KnownImageScaleMode {
  * Defines values for ImageScaleMode. \
  * {@link KnownImageScaleMode} can be used interchangeably with ImageScaleMode,
  * this enum contains the known values that the service supports.
- * ### Know values supported by the service
+ * ### Known values supported by the service
  * **preserveAspectRatio**: Preserves the same aspect ratio as the input image. If only one image dimension is provided, the second dimension is calculated based on the input image aspect ratio. When 2 dimensions are provided, the image is resized to fit the most constraining dimension, considering the input image size and aspect ratio. \
  * **pad**: Pads the image with black horizontal stripes (letterbox) or black vertical stripes (pillar-box) so the image is resized to the specified dimensions while not altering the content aspect ratio. \
 * **stretch**: Stretches the original image so it is resized to the specified dimensions.
@@ -843,7 +1045,7 @@ export const enum KnownImageScaleMode {
  */
 export type ImageScaleMode = string;
 
 /** Known values of {@link GrpcExtensionDataTransferMode} that the service accepts. */
-export const enum KnownGrpcExtensionDataTransferMode {
+export enum KnownGrpcExtensionDataTransferMode {
   /** Media samples are embedded into the gRPC messages. This mode is less efficient but it requires a simpler implementation and can be used with plugins which are not on the same node as the Video Analyzer module. */
   Embedded = "embedded",
   /** Media samples are made available through shared memory. This mode enables efficient data transfers but it requires the extension plugin to be co-located on the same node and to share the same shared memory space. */
@@ -854,14 +1056,14 @@ export const enum KnownGrpcExtensionDataTransferMode {
  * Defines values for GrpcExtensionDataTransferMode. \
  * {@link KnownGrpcExtensionDataTransferMode} can be used interchangeably with GrpcExtensionDataTransferMode,
  * this enum contains the known values that the service supports.
- * ### Know values supported by the service
+ * ### Known values supported by the service
  * **embedded**: Media samples are embedded into the gRPC messages. This mode is less efficient but it requires a simpler implementation and can be used with plugins which are not on the same node as the Video Analyzer module. \
  * **sharedMemory**: Media samples are made available through shared memory. This mode enables efficient data transfers but it requires the extension plugin to be co-located on the same node and to share the same shared memory space.
  */
 export type GrpcExtensionDataTransferMode = string;
 
 /** Known values of {@link ImageFormatRawPixelFormat} that the service accepts. */
-export const enum KnownImageFormatRawPixelFormat {
+export enum KnownImageFormatRawPixelFormat {
   /** Planar YUV 4:2:0, 12bpp, (1 Cr and Cb sample per 2x2 Y samples). */
   Yuv420P = "yuv420p",
   /** Packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian. */
@@ -890,7 +1092,7 @@ export const enum KnownImageFormatRawPixelFormat {
  * Defines values for ImageFormatRawPixelFormat. \
  * {@link KnownImageFormatRawPixelFormat} can be used interchangeably with ImageFormatRawPixelFormat,
  * this enum contains the known values that the service supports.
- * ### Know values supported by the service
+ * ### Known values supported by the service
  * **yuv420p**: Planar YUV 4:2:0, 12bpp, (1 Cr and Cb sample per 2x2 Y samples). \
  * **rgb565be**: Packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian. \
  * **rgb565le**: Packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian. \
@@ -906,7 +1108,7 @@ export const enum KnownImageFormatRawPixelFormat {
  */
 export type ImageFormatRawPixelFormat = string;
 
 /** Known values of {@link SpatialAnalysisOperationFocus} that the service accepts. */
-export const enum KnownSpatialAnalysisOperationFocus {
+export enum KnownSpatialAnalysisOperationFocus {
   /** The center of the object. */
   Center = "center",
   /** The bottom center of the object. */
@@ -919,7 +1121,7 @@ export const enum KnownSpatialAnalysisOperationFocus {
  * Defines values for SpatialAnalysisOperationFocus. \
  * {@link KnownSpatialAnalysisOperationFocus} can be used interchangeably with SpatialAnalysisOperationFocus,
  * this enum contains the known values that the service supports.
- * ### Know values supported by the service
+ * ### Known values supported by the service
  * **center**: The center of the object. \
 * **bottomCenter**: The bottom center of the object.
\ * **footprint**: The footprint. @@ -927,7 +1129,7 @@ export const enum KnownSpatialAnalysisOperationFocus { export type SpatialAnalysisOperationFocus = string; /** Known values of {@link SpatialAnalysisPersonCountEventTrigger} that the service accepts. */ -export const enum KnownSpatialAnalysisPersonCountEventTrigger { +export enum KnownSpatialAnalysisPersonCountEventTrigger { /** Event trigger. */ Event = "event", /** Interval trigger. */ @@ -938,14 +1140,14 @@ export const enum KnownSpatialAnalysisPersonCountEventTrigger { * Defines values for SpatialAnalysisPersonCountEventTrigger. \ * {@link KnownSpatialAnalysisPersonCountEventTrigger} can be used interchangeably with SpatialAnalysisPersonCountEventTrigger, * this enum contains the known values that the service supports. - * ### Know values supported by the service + * ### Known values supported by the service * **event**: Event trigger. \ * **interval**: Interval trigger. */ export type SpatialAnalysisPersonCountEventTrigger = string; /** Known values of {@link SpatialAnalysisPersonZoneCrossingEventType} that the service accepts. */ -export const enum KnownSpatialAnalysisPersonZoneCrossingEventType { +export enum KnownSpatialAnalysisPersonZoneCrossingEventType { /** Zone crossing event type. */ ZoneCrossing = "zoneCrossing", /** Zone dwell time event type. */ @@ -956,14 +1158,14 @@ export const enum KnownSpatialAnalysisPersonZoneCrossingEventType { * Defines values for SpatialAnalysisPersonZoneCrossingEventType. \ * {@link KnownSpatialAnalysisPersonZoneCrossingEventType} can be used interchangeably with SpatialAnalysisPersonZoneCrossingEventType, * this enum contains the known values that the service supports. - * ### Know values supported by the service + * ### Known values supported by the service * **zoneCrossing**: Zone crossing event type. \ * **zoneDwellTime**: Zone dwell time event type. */ export type SpatialAnalysisPersonZoneCrossingEventType = string; /** Known values of {@link SpatialAnalysisPersonDistanceEventTrigger} that the service accepts. */ -export const enum KnownSpatialAnalysisPersonDistanceEventTrigger { +export enum KnownSpatialAnalysisPersonDistanceEventTrigger { /** Event trigger. */ Event = "event", /** Interval trigger. */ @@ -974,15 +1176,90 @@ export const enum KnownSpatialAnalysisPersonDistanceEventTrigger { * Defines values for SpatialAnalysisPersonDistanceEventTrigger. \ * {@link KnownSpatialAnalysisPersonDistanceEventTrigger} can be used interchangeably with SpatialAnalysisPersonDistanceEventTrigger, * this enum contains the known values that the service supports. - * ### Know values supported by the service + * ### Known values supported by the service * **event**: Event trigger. \ * **interval**: Interval trigger. */ export type SpatialAnalysisPersonDistanceEventTrigger = string; +/** Known values of {@link OnvifSystemDateTimeType} that the service accepts. */ +export enum KnownOnvifSystemDateTimeType { + Ntp = "Ntp", + Manual = "Manual" +} + +/** + * Defines values for OnvifSystemDateTimeType. \ + * {@link KnownOnvifSystemDateTimeType} can be used interchangeably with OnvifSystemDateTimeType, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **Ntp** \ + * **Manual** + */ +export type OnvifSystemDateTimeType = string; + +/** Known values of {@link VideoEncoding} that the service accepts. */ +export enum KnownVideoEncoding { + /** The Media Profile uses JPEG encoding. */ + Jpeg = "JPEG", + /** The Media Profile uses H264 encoding. 
*/ + H264 = "H264", + /** The Media Profile uses MPEG4 encoding. */ + Mpeg4 = "MPEG4" +} + +/** + * Defines values for VideoEncoding. \ + * {@link KnownVideoEncoding} can be used interchangeably with VideoEncoding, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **JPEG**: The Media Profile uses JPEG encoding. \ + * **H264**: The Media Profile uses H264 encoding. \ + * **MPEG4**: The Media Profile uses MPEG4 encoding. + */ +export type VideoEncoding = string; + +/** Known values of {@link H264Profile} that the service accepts. */ +export enum KnownH264Profile { + Baseline = "Baseline", + Main = "Main", + Extended = "Extended", + High = "High" +} + +/** + * Defines values for H264Profile. \ + * {@link KnownH264Profile} can be used interchangeably with H264Profile, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **Baseline** \ + * **Main** \ + * **Extended** \ + * **High** + */ +export type H264Profile = string; + +/** Known values of {@link Mpeg4Profile} that the service accepts. */ +export enum KnownMpeg4Profile { + /** Simple Profile. */ + SP = "SP", + /** Advanced Simple Profile. */ + ASP = "ASP" +} + +/** + * Defines values for Mpeg4Profile. \ + * {@link KnownMpeg4Profile} can be used interchangeably with Mpeg4Profile, + * this enum contains the known values that the service supports. + * ### Known values supported by the service + * **SP**: Simple Profile. \ + * **ASP**: Advanced Simple Profile. + */ +export type Mpeg4Profile = string; + /** Optional parameters. */ export interface GeneratedClientOptionalParams - extends coreHttp.ServiceClientOptions { + extends coreClient.ServiceClientOptions { /** Overrides client endpoint. */ endpoint?: string; } diff --git a/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/mappers.ts b/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/mappers.ts index 9aafd7235a7b..00747642cc2a 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/mappers.ts +++ b/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/mappers.ts @@ -6,9 +6,9 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -import * as coreHttp from "@azure/core-http"; +import * as coreClient from "@azure/core-client"; -export const LivePipeline: coreHttp.CompositeMapper = { +export const LivePipeline: coreClient.CompositeMapper = { type: { name: "Composite", className: "LivePipeline", @@ -38,7 +38,7 @@ export const LivePipeline: coreHttp.CompositeMapper = { } }; -export const SystemData: coreHttp.CompositeMapper = { +export const SystemData: coreClient.CompositeMapper = { type: { name: "Composite", className: "SystemData", @@ -59,7 +59,7 @@ export const SystemData: coreHttp.CompositeMapper = { } }; -export const LivePipelineProperties: coreHttp.CompositeMapper = { +export const LivePipelineProperties: coreClient.CompositeMapper = { type: { name: "Composite", className: "LivePipelineProperties", @@ -98,7 +98,7 @@ export const LivePipelineProperties: coreHttp.CompositeMapper = { } }; -export const ParameterDefinition: coreHttp.CompositeMapper = { +export const ParameterDefinition: coreClient.CompositeMapper = { type: { name: "Composite", className: "ParameterDefinition", @@ -120,7 +120,7 @@ export const ParameterDefinition: coreHttp.CompositeMapper = { } }; -export const LivePipelineCollection: coreHttp.CompositeMapper = { +export const LivePipelineCollection: coreClient.CompositeMapper = { type: { name: "Composite", className: "LivePipelineCollection", @@ -147,7 +147,7 @@ export const LivePipelineCollection: coreHttp.CompositeMapper = { } }; -export const PipelineTopologyCollection: coreHttp.CompositeMapper = { +export const PipelineTopologyCollection: coreClient.CompositeMapper = { type: { name: "Composite", className: "PipelineTopologyCollection", @@ -174,7 +174,7 @@ export const PipelineTopologyCollection: coreHttp.CompositeMapper = { } }; -export const PipelineTopology: coreHttp.CompositeMapper = { +export const PipelineTopology: coreClient.CompositeMapper = { type: { name: "Composite", className: "PipelineTopology", @@ -204,7 +204,7 @@ export const PipelineTopology: coreHttp.CompositeMapper = { } }; -export const PipelineTopologyProperties: coreHttp.CompositeMapper = { +export const PipelineTopologyProperties: coreClient.CompositeMapper = { type: { name: "Composite", className: "PipelineTopologyProperties", @@ -267,7 +267,7 @@ export const PipelineTopologyProperties: coreHttp.CompositeMapper = { } }; -export const ParameterDeclaration: coreHttp.CompositeMapper = { +export const ParameterDeclaration: coreClient.CompositeMapper = { type: { name: "Composite", className: "ParameterDeclaration", @@ -305,7 +305,7 @@ export const ParameterDeclaration: coreHttp.CompositeMapper = { } }; -export const SourceNodeBase: coreHttp.CompositeMapper = { +export const SourceNodeBase: coreClient.CompositeMapper = { type: { name: "Composite", className: "SourceNodeBase", @@ -333,7 +333,7 @@ export const SourceNodeBase: coreHttp.CompositeMapper = { } }; -export const ProcessorNodeBase: coreHttp.CompositeMapper = { +export const ProcessorNodeBase: coreClient.CompositeMapper = { type: { name: "Composite", className: "ProcessorNodeBase", @@ -374,7 +374,7 @@ export const ProcessorNodeBase: coreHttp.CompositeMapper = { } }; -export const NodeInput: coreHttp.CompositeMapper = { +export const NodeInput: coreClient.CompositeMapper = { type: { name: "Composite", className: "NodeInput", @@ -402,7 +402,7 @@ export const NodeInput: coreHttp.CompositeMapper = { } }; -export const OutputSelector: coreHttp.CompositeMapper = { +export const OutputSelector: coreClient.CompositeMapper = { type: { name: "Composite", className: 
"OutputSelector", @@ -429,7 +429,7 @@ export const OutputSelector: coreHttp.CompositeMapper = { } }; -export const SinkNodeBase: coreHttp.CompositeMapper = { +export const SinkNodeBase: coreClient.CompositeMapper = { type: { name: "Composite", className: "SinkNodeBase", @@ -470,7 +470,7 @@ export const SinkNodeBase: coreHttp.CompositeMapper = { } }; -export const EndpointBase: coreHttp.CompositeMapper = { +export const EndpointBase: coreClient.CompositeMapper = { type: { name: "Composite", className: "EndpointBase", @@ -505,7 +505,7 @@ export const EndpointBase: coreHttp.CompositeMapper = { } }; -export const CredentialsBase: coreHttp.CompositeMapper = { +export const CredentialsBase: coreClient.CompositeMapper = { type: { name: "Composite", className: "CredentialsBase", @@ -526,7 +526,7 @@ export const CredentialsBase: coreHttp.CompositeMapper = { } }; -export const CertificateSource: coreHttp.CompositeMapper = { +export const CertificateSource: coreClient.CompositeMapper = { type: { name: "Composite", className: "CertificateSource", @@ -547,7 +547,7 @@ export const CertificateSource: coreHttp.CompositeMapper = { } }; -export const TlsValidationOptions: coreHttp.CompositeMapper = { +export const TlsValidationOptions: coreClient.CompositeMapper = { type: { name: "Composite", className: "TlsValidationOptions", @@ -568,7 +568,22 @@ export const TlsValidationOptions: coreHttp.CompositeMapper = { } }; -export const VideoCreationProperties: coreHttp.CompositeMapper = { +export const VideoPublishingOptions: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "VideoPublishingOptions", + modelProperties: { + enableVideoPreviewImage: { + serializedName: "enableVideoPreviewImage", + type: { + name: "String" + } + } + } + } +}; + +export const VideoCreationProperties: coreClient.CompositeMapper = { type: { name: "Composite", className: "VideoCreationProperties", @@ -590,12 +605,18 @@ export const VideoCreationProperties: coreHttp.CompositeMapper = { type: { name: "String" } + }, + retentionPeriod: { + serializedName: "retentionPeriod", + type: { + name: "String" + } } } } }; -export const NamedLineBase: coreHttp.CompositeMapper = { +export const NamedLineBase: coreClient.CompositeMapper = { type: { name: "Composite", className: "NamedLineBase", @@ -623,7 +644,7 @@ export const NamedLineBase: coreHttp.CompositeMapper = { } }; -export const ImageProperties: coreHttp.CompositeMapper = { +export const ImageProperties: coreClient.CompositeMapper = { type: { name: "Composite", className: "ImageProperties", @@ -646,7 +667,7 @@ export const ImageProperties: coreHttp.CompositeMapper = { } }; -export const ImageScale: coreHttp.CompositeMapper = { +export const ImageScale: coreClient.CompositeMapper = { type: { name: "Composite", className: "ImageScale", @@ -673,7 +694,7 @@ export const ImageScale: coreHttp.CompositeMapper = { } }; -export const ImageFormatProperties: coreHttp.CompositeMapper = { +export const ImageFormatProperties: coreClient.CompositeMapper = { type: { name: "Composite", className: "ImageFormatProperties", @@ -694,7 +715,7 @@ export const ImageFormatProperties: coreHttp.CompositeMapper = { } }; -export const SamplingOptions: coreHttp.CompositeMapper = { +export const SamplingOptions: coreClient.CompositeMapper = { type: { name: "Composite", className: "SamplingOptions", @@ -715,7 +736,7 @@ export const SamplingOptions: coreHttp.CompositeMapper = { } }; -export const GrpcExtensionDataTransfer: coreHttp.CompositeMapper = { +export const GrpcExtensionDataTransfer: 
coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.GrpcExtensionDataTransfer", type: { name: "Composite", @@ -738,7 +759,7 @@ export const GrpcExtensionDataTransfer: coreHttp.CompositeMapper = { } }; -export const NamedPolygonBase: coreHttp.CompositeMapper = { +export const NamedPolygonBase: coreClient.CompositeMapper = { type: { name: "Composite", className: "NamedPolygonBase", @@ -766,7 +787,7 @@ export const NamedPolygonBase: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisOperationBase: coreHttp.CompositeMapper = { +export const SpatialAnalysisOperationBase: coreClient.CompositeMapper = { type: { name: "Composite", className: "SpatialAnalysisOperationBase", @@ -787,7 +808,7 @@ export const SpatialAnalysisOperationBase: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisOperationEventBase: coreHttp.CompositeMapper = { +export const SpatialAnalysisOperationEventBase: coreClient.CompositeMapper = { type: { name: "Composite", className: "SpatialAnalysisOperationEventBase", @@ -808,7 +829,7 @@ export const SpatialAnalysisOperationEventBase: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisPersonCountZoneEvents: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonCountZoneEvents: coreClient.CompositeMapper = { type: { name: "Composite", className: "SpatialAnalysisPersonCountZoneEvents", @@ -836,7 +857,7 @@ export const SpatialAnalysisPersonCountZoneEvents: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisPersonZoneCrossingZoneEvents: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonZoneCrossingZoneEvents: coreClient.CompositeMapper = { type: { name: "Composite", className: "SpatialAnalysisPersonZoneCrossingZoneEvents", @@ -864,7 +885,7 @@ export const SpatialAnalysisPersonZoneCrossingZoneEvents: coreHttp.CompositeMapp } }; -export const SpatialAnalysisPersonDistanceZoneEvents: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonDistanceZoneEvents: coreClient.CompositeMapper = { type: { name: "Composite", className: "SpatialAnalysisPersonDistanceZoneEvents", @@ -892,7 +913,7 @@ export const SpatialAnalysisPersonDistanceZoneEvents: coreHttp.CompositeMapper = } }; -export const SpatialAnalysisPersonLineCrossingLineEvents: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonLineCrossingLineEvents: coreClient.CompositeMapper = { type: { name: "Composite", className: "SpatialAnalysisPersonLineCrossingLineEvents", @@ -920,7 +941,512 @@ export const SpatialAnalysisPersonLineCrossingLineEvents: coreHttp.CompositeMapp } }; -export const MethodRequest: coreHttp.CompositeMapper = { +export const RemoteDeviceAdapter: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "RemoteDeviceAdapter", + modelProperties: { + name: { + serializedName: "name", + required: true, + type: { + name: "String" + } + }, + systemData: { + serializedName: "systemData", + type: { + name: "Composite", + className: "SystemData" + } + }, + properties: { + serializedName: "properties", + type: { + name: "Composite", + className: "RemoteDeviceAdapterProperties" + } + } + } + } +}; + +export const RemoteDeviceAdapterProperties: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "RemoteDeviceAdapterProperties", + modelProperties: { + description: { + serializedName: "description", + type: { + name: "String" + } + }, + target: { + serializedName: "target", + type: { + name: "Composite", + className: "RemoteDeviceAdapterTarget" + } + }, + iotHubDeviceConnection: { 
+ serializedName: "iotHubDeviceConnection", + type: { + name: "Composite", + className: "IotHubDeviceConnection" + } + } + } + } +}; + +export const RemoteDeviceAdapterTarget: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "RemoteDeviceAdapterTarget", + modelProperties: { + host: { + serializedName: "host", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const IotHubDeviceConnection: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "IotHubDeviceConnection", + modelProperties: { + deviceId: { + serializedName: "deviceId", + required: true, + type: { + name: "String" + } + }, + credentials: { + serializedName: "credentials", + type: { + name: "Composite", + className: "CredentialsBase" + } + } + } + } +}; + +export const RemoteDeviceAdapterCollection: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "RemoteDeviceAdapterCollection", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "RemoteDeviceAdapter" + } + } + } + }, + continuationToken: { + serializedName: "@continuationToken", + type: { + name: "String" + } + } + } + } +}; + +export const DiscoveredOnvifDeviceCollection: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DiscoveredOnvifDeviceCollection", + modelProperties: { + value: { + serializedName: "value", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "DiscoveredOnvifDevice" + } + } + } + } + } + } +}; + +export const DiscoveredOnvifDevice: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "DiscoveredOnvifDevice", + modelProperties: { + serviceIdentifier: { + serializedName: "serviceIdentifier", + type: { + name: "String" + } + }, + remoteIPAddress: { + serializedName: "remoteIPAddress", + type: { + name: "String" + } + }, + scopes: { + serializedName: "scopes", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + endpoints: { + serializedName: "endpoints", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const OnvifDevice: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "OnvifDevice", + modelProperties: { + hostname: { + serializedName: "hostname", + type: { + name: "Composite", + className: "OnvifHostName" + } + }, + systemDateTime: { + serializedName: "systemDateTime", + type: { + name: "Composite", + className: "OnvifSystemDateTime" + } + }, + dns: { + serializedName: "dns", + type: { + name: "Composite", + className: "OnvifDns" + } + }, + mediaProfiles: { + serializedName: "mediaProfiles", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "MediaProfile" + } + } + } + } + } + } +}; + +export const OnvifHostName: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "OnvifHostName", + modelProperties: { + fromDhcp: { + serializedName: "fromDhcp", + type: { + name: "Boolean" + } + }, + hostname: { + serializedName: "hostname", + type: { + name: "String" + } + } + } + } +}; + +export const OnvifSystemDateTime: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "OnvifSystemDateTime", + modelProperties: { + type: { + serializedName: "type", + type: { + name: "String" + } + }, + time: { + serializedName: "time", + type: { + name: "String" + } + }, + timeZone: { + serializedName: "timeZone", + 
type: { + name: "String" + } + } + } + } +}; + +export const OnvifDns: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "OnvifDns", + modelProperties: { + fromDhcp: { + serializedName: "fromDhcp", + type: { + name: "Boolean" + } + }, + ipv4Address: { + serializedName: "ipv4Address", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + ipv6Address: { + serializedName: "ipv6Address", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; + +export const MediaProfile: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "MediaProfile", + modelProperties: { + name: { + serializedName: "name", + type: { + name: "String" + } + }, + mediaUri: { + serializedName: "mediaUri", + type: { + name: "Dictionary", + value: { type: { name: "any" } } + } + }, + videoEncoderConfiguration: { + serializedName: "videoEncoderConfiguration", + type: { + name: "Composite", + className: "VideoEncoderConfiguration" + } + } + } + } +}; + +export const VideoEncoderConfiguration: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "VideoEncoderConfiguration", + modelProperties: { + encoding: { + serializedName: "encoding", + type: { + name: "String" + } + }, + quality: { + serializedName: "quality", + type: { + name: "Number" + } + }, + resolution: { + serializedName: "resolution", + type: { + name: "Composite", + className: "VideoResolution" + } + }, + rateControl: { + serializedName: "rateControl", + type: { + name: "Composite", + className: "RateControl" + } + }, + h264: { + serializedName: "h264", + type: { + name: "Composite", + className: "H264Configuration" + } + }, + mpeg4: { + serializedName: "mpeg4", + type: { + name: "Composite", + className: "Mpeg4Configuration" + } + } + } + } +}; + +export const VideoResolution: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "VideoResolution", + modelProperties: { + width: { + serializedName: "width", + type: { + name: "Number" + } + }, + height: { + serializedName: "height", + type: { + name: "Number" + } + } + } + } +}; + +export const RateControl: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "RateControl", + modelProperties: { + bitRateLimit: { + serializedName: "bitRateLimit", + type: { + name: "Number" + } + }, + encodingInterval: { + serializedName: "encodingInterval", + type: { + name: "Number" + } + }, + frameRateLimit: { + serializedName: "frameRateLimit", + type: { + name: "Number" + } + }, + guaranteedFrameRate: { + serializedName: "guaranteedFrameRate", + type: { + name: "Boolean" + } + } + } + } +}; + +export const H264Configuration: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "H264Configuration", + modelProperties: { + govLength: { + serializedName: "govLength", + type: { + name: "Number" + } + }, + profile: { + serializedName: "profile", + type: { + name: "String" + } + } + } + } +}; + +export const Mpeg4Configuration: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "Mpeg4Configuration", + modelProperties: { + govLength: { + serializedName: "govLength", + type: { + name: "Number" + } + }, + profile: { + serializedName: "profile", + type: { + name: "String" + } + } + } + } +}; + +export const MediaUri: coreClient.CompositeMapper = { + type: { + name: "Composite", + className: "MediaUri", + modelProperties: { + uri: { + serializedName: "uri", + type: { + name: "String" + } + } + } + } +}; + +export 
const MethodRequest: coreClient.CompositeMapper = { type: { name: "Composite", className: "MethodRequest", @@ -939,7 +1465,7 @@ export const MethodRequest: coreHttp.CompositeMapper = { } }, apiVersion: { - defaultValue: "1.0", + defaultValue: "1.1", isConstant: true, serializedName: "@apiVersion", type: { @@ -950,7 +1476,7 @@ export const MethodRequest: coreHttp.CompositeMapper = { } }; -export const RtspSource: coreHttp.CompositeMapper = { +export const RtspSource: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.RtspSource", type: { name: "Composite", @@ -976,7 +1502,7 @@ export const RtspSource: coreHttp.CompositeMapper = { } }; -export const IotHubMessageSource: coreHttp.CompositeMapper = { +export const IotHubMessageSource: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.IotHubMessageSource", type: { name: "Composite", @@ -995,7 +1521,7 @@ export const IotHubMessageSource: coreHttp.CompositeMapper = { } }; -export const MotionDetectionProcessor: coreHttp.CompositeMapper = { +export const MotionDetectionProcessor: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.MotionDetectionProcessor", type: { name: "Composite", @@ -1026,7 +1552,7 @@ export const MotionDetectionProcessor: coreHttp.CompositeMapper = { } }; -export const ObjectTrackingProcessor: coreHttp.CompositeMapper = { +export const ObjectTrackingProcessor: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.ObjectTrackingProcessor", type: { name: "Composite", @@ -1045,7 +1571,7 @@ export const ObjectTrackingProcessor: coreHttp.CompositeMapper = { } }; -export const LineCrossingProcessor: coreHttp.CompositeMapper = { +export const LineCrossingProcessor: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.LineCrossingProcessor", type: { name: "Composite", @@ -1071,7 +1597,7 @@ export const LineCrossingProcessor: coreHttp.CompositeMapper = { } }; -export const ExtensionProcessorBase: coreHttp.CompositeMapper = { +export const ExtensionProcessorBase: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.ExtensionProcessorBase", type: { name: "Composite", @@ -1108,7 +1634,7 @@ export const ExtensionProcessorBase: coreHttp.CompositeMapper = { } }; -export const SignalGateProcessor: coreHttp.CompositeMapper = { +export const SignalGateProcessor: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.SignalGateProcessor", type: { name: "Composite", @@ -1145,7 +1671,7 @@ export const SignalGateProcessor: coreHttp.CompositeMapper = { } }; -export const CognitiveServicesVisionProcessor: coreHttp.CompositeMapper = { +export const CognitiveServicesVisionProcessor: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.CognitiveServicesVisionProcessor", type: { name: "Composite", @@ -1186,7 +1712,7 @@ export const CognitiveServicesVisionProcessor: coreHttp.CompositeMapper = { } }; -export const IotHubMessageSink: coreHttp.CompositeMapper = { +export const IotHubMessageSink: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.IotHubMessageSink", type: { name: "Composite", @@ -1206,7 +1732,7 @@ export const IotHubMessageSink: coreHttp.CompositeMapper = { } }; -export const FileSink: coreHttp.CompositeMapper = { +export const FileSink: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.FileSink", type: { name: "Composite", @@ -1240,7 +1766,7 @@ export const FileSink: coreHttp.CompositeMapper = { } }; -export const VideoSink: 
coreHttp.CompositeMapper = { +export const VideoSink: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.VideoSink", type: { name: "Composite", @@ -1263,6 +1789,13 @@ export const VideoSink: coreHttp.CompositeMapper = { className: "VideoCreationProperties" } }, + videoPublishingOptions: { + serializedName: "videoPublishingOptions", + type: { + name: "Composite", + className: "VideoPublishingOptions" + } + }, localMediaCachePath: { serializedName: "localMediaCachePath", required: true, @@ -1281,7 +1814,7 @@ export const VideoSink: coreHttp.CompositeMapper = { } }; -export const UnsecuredEndpoint: coreHttp.CompositeMapper = { +export const UnsecuredEndpoint: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.UnsecuredEndpoint", type: { name: "Composite", @@ -1294,7 +1827,7 @@ export const UnsecuredEndpoint: coreHttp.CompositeMapper = { } }; -export const TlsEndpoint: coreHttp.CompositeMapper = { +export const TlsEndpoint: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.TlsEndpoint", type: { name: "Composite", @@ -1321,7 +1854,7 @@ export const TlsEndpoint: coreHttp.CompositeMapper = { } }; -export const UsernamePasswordCredentials: coreHttp.CompositeMapper = { +export const UsernamePasswordCredentials: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.UsernamePasswordCredentials", type: { name: "Composite", @@ -1348,7 +1881,7 @@ export const UsernamePasswordCredentials: coreHttp.CompositeMapper = { } }; -export const HttpHeaderCredentials: coreHttp.CompositeMapper = { +export const HttpHeaderCredentials: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.HttpHeaderCredentials", type: { name: "Composite", @@ -1375,7 +1908,27 @@ export const HttpHeaderCredentials: coreHttp.CompositeMapper = { } }; -export const PemCertificateList: coreHttp.CompositeMapper = { +export const SymmetricKeyCredentials: coreClient.CompositeMapper = { + serializedName: "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials", + type: { + name: "Composite", + className: "SymmetricKeyCredentials", + uberParent: "CredentialsBase", + polymorphicDiscriminator: CredentialsBase.type.polymorphicDiscriminator, + modelProperties: { + ...CredentialsBase.type.modelProperties, + key: { + serializedName: "key", + required: true, + type: { + name: "String" + } + } + } + } +}; + +export const PemCertificateList: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.PemCertificateList", type: { name: "Composite", @@ -1400,7 +1953,7 @@ export const PemCertificateList: coreHttp.CompositeMapper = { } }; -export const NamedLineString: coreHttp.CompositeMapper = { +export const NamedLineString: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.NamedLineString", type: { name: "Composite", @@ -1420,7 +1973,7 @@ export const NamedLineString: coreHttp.CompositeMapper = { } }; -export const ImageFormatRaw: coreHttp.CompositeMapper = { +export const ImageFormatRaw: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.ImageFormatRaw", type: { name: "Composite", @@ -1441,7 +1994,7 @@ export const ImageFormatRaw: coreHttp.CompositeMapper = { } }; -export const ImageFormatJpeg: coreHttp.CompositeMapper = { +export const ImageFormatJpeg: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.ImageFormatJpeg", type: { name: "Composite", @@ -1461,7 +2014,7 @@ export const ImageFormatJpeg: coreHttp.CompositeMapper = { } }; -export const ImageFormatBmp: 
coreHttp.CompositeMapper = { +export const ImageFormatBmp: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.ImageFormatBmp", type: { name: "Composite", @@ -1475,7 +2028,7 @@ export const ImageFormatBmp: coreHttp.CompositeMapper = { } }; -export const ImageFormatPng: coreHttp.CompositeMapper = { +export const ImageFormatPng: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.ImageFormatPng", type: { name: "Composite", @@ -1489,7 +2042,7 @@ export const ImageFormatPng: coreHttp.CompositeMapper = { } }; -export const NamedPolygonString: coreHttp.CompositeMapper = { +export const NamedPolygonString: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.NamedPolygonString", type: { name: "Composite", @@ -1509,7 +2062,7 @@ export const NamedPolygonString: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisCustomOperation: coreHttp.CompositeMapper = { +export const SpatialAnalysisCustomOperation: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.SpatialAnalysisCustomOperation", type: { name: "Composite", @@ -1530,7 +2083,7 @@ export const SpatialAnalysisCustomOperation: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisTypedOperationBase: coreHttp.CompositeMapper = { +export const SpatialAnalysisTypedOperationBase: coreClient.CompositeMapper = { serializedName: "SpatialAnalysisTypedOperationBase", type: { name: "Composite", @@ -1548,18 +2101,36 @@ export const SpatialAnalysisTypedOperationBase: coreHttp.CompositeMapper = { name: "String" } }, + calibrationConfiguration: { + serializedName: "calibrationConfiguration", + type: { + name: "String" + } + }, cameraConfiguration: { serializedName: "cameraConfiguration", type: { name: "String" } }, + cameraCalibratorNodeConfiguration: { + serializedName: "cameraCalibratorNodeConfiguration", + type: { + name: "String" + } + }, detectorNodeConfiguration: { serializedName: "detectorNodeConfiguration", type: { name: "String" } }, + trackerNodeConfiguration: { + serializedName: "trackerNodeConfiguration", + type: { + name: "String" + } + }, enableFaceMaskClassifier: { serializedName: "enableFaceMaskClassifier", type: { @@ -1570,7 +2141,7 @@ export const SpatialAnalysisTypedOperationBase: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisPersonCountEvent: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonCountEvent: coreClient.CompositeMapper = { type: { name: "Composite", className: "SpatialAnalysisPersonCountEvent", @@ -1592,7 +2163,7 @@ export const SpatialAnalysisPersonCountEvent: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisPersonZoneCrossingEvent: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonZoneCrossingEvent: coreClient.CompositeMapper = { type: { name: "Composite", className: "SpatialAnalysisPersonZoneCrossingEvent", @@ -1608,7 +2179,7 @@ export const SpatialAnalysisPersonZoneCrossingEvent: coreHttp.CompositeMapper = } }; -export const SpatialAnalysisPersonDistanceEvent: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonDistanceEvent: coreClient.CompositeMapper = { type: { name: "Composite", className: "SpatialAnalysisPersonDistanceEvent", @@ -1642,7 +2213,7 @@ export const SpatialAnalysisPersonDistanceEvent: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisPersonLineCrossingEvent: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonLineCrossingEvent: coreClient.CompositeMapper = { type: { name: "Composite", className: 
"SpatialAnalysisPersonLineCrossingEvent", @@ -1652,7 +2223,7 @@ export const SpatialAnalysisPersonLineCrossingEvent: coreHttp.CompositeMapper = } }; -export const GrpcExtension: coreHttp.CompositeMapper = { +export const GrpcExtension: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.GrpcExtension", type: { name: "Composite", @@ -1678,7 +2249,7 @@ export const GrpcExtension: coreHttp.CompositeMapper = { } }; -export const HttpExtension: coreHttp.CompositeMapper = { +export const HttpExtension: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.HttpExtension", type: { name: "Composite", @@ -1691,7 +2262,7 @@ export const HttpExtension: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisPersonCountOperation: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonCountOperation: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonCountOperation", type: { @@ -1719,7 +2290,7 @@ export const SpatialAnalysisPersonCountOperation: coreHttp.CompositeMapper = { } }; -export const SpatialAnalysisPersonZoneCrossingOperation: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonZoneCrossingOperation: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonZoneCrossingOperation", type: { @@ -1747,7 +2318,7 @@ export const SpatialAnalysisPersonZoneCrossingOperation: coreHttp.CompositeMappe } }; -export const SpatialAnalysisPersonDistanceOperation: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonDistanceOperation: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonDistanceOperation", type: { @@ -1775,7 +2346,7 @@ export const SpatialAnalysisPersonDistanceOperation: coreHttp.CompositeMapper = } }; -export const SpatialAnalysisPersonLineCrossingOperation: coreHttp.CompositeMapper = { +export const SpatialAnalysisPersonLineCrossingOperation: coreClient.CompositeMapper = { serializedName: "#Microsoft.VideoAnalyzer.SpatialAnalysisPersonLineCrossingOperation", type: { @@ -1830,6 +2401,7 @@ export let discriminators = { "EndpointBase.#Microsoft.VideoAnalyzer.TlsEndpoint": TlsEndpoint, "CredentialsBase.#Microsoft.VideoAnalyzer.UsernamePasswordCredentials": UsernamePasswordCredentials, "CredentialsBase.#Microsoft.VideoAnalyzer.HttpHeaderCredentials": HttpHeaderCredentials, + "CredentialsBase.#Microsoft.VideoAnalyzer.SymmetricKeyCredentials": SymmetricKeyCredentials, "CertificateSource.#Microsoft.VideoAnalyzer.PemCertificateList": PemCertificateList, "NamedLineBase.#Microsoft.VideoAnalyzer.NamedLineString": NamedLineString, "ImageFormatProperties.#Microsoft.VideoAnalyzer.ImageFormatRaw": ImageFormatRaw, diff --git a/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/parameters.ts b/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/parameters.ts index cbca1908a524..be99f99c1126 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/parameters.ts +++ b/sdk/videoanalyzer/video-analyzer-edge/src/generated/models/parameters.ts @@ -6,7 +6,7 @@ * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
 */

-import { OperationURLParameter } from "@azure/core-http";
+import { OperationURLParameter } from "@azure/core-client";

 export const $host: OperationURLParameter = {
   parameterPath: "$host",
diff --git a/sdk/videoanalyzer/video-analyzer-edge/src/index.ts b/sdk/videoanalyzer/video-analyzer-edge/src/index.ts
index 00940cb8a49d..bff0f45659db 100644
--- a/sdk/videoanalyzer/video-analyzer-edge/src/index.ts
+++ b/sdk/videoanalyzer/video-analyzer-edge/src/index.ts
@@ -2,7 +2,13 @@
 // Licensed under the MIT license.

 export * from "./generated/models";
-import { PipelineTopology, LivePipeline } from "./generated/models";
+import {
+  PipelineTopology,
+  LivePipeline,
+  RemoteDeviceAdapter,
+  UnsecuredEndpoint,
+  OnvifDevice
+} from "./generated/models";
 import { MethodRequest } from "./generated/models/mappers";

 /**
@@ -42,7 +48,13 @@ export type RequestType =
   | "livePipelineList"
   | "livePipelineDelete"
   | "livePipelineActivate"
-  | "livePipelineDeactivate";
+  | "livePipelineDeactivate"
+  | "onvifDeviceDiscover"
+  | "onvifDeviceGet"
+  | "remoteDeviceAdapterSet"
+  | "remoteDeviceAdapterList"
+  | "remoteDeviceAdapterGet"
+  | "remoteDeviceAdapterDelete";

 const apiVersion = MethodRequest.type.modelProperties!.apiVersion.defaultValue;

@@ -119,14 +131,73 @@ export function createRequest(
   request: "livePipelineDeactivate",
   payload: string
 ): Request;
-export function createRequest(
+/**
+ * Create a request to list all of the ONVIF devices on the network
+ * @param request - The string which determines the type of request. In this case an OnvifDeviceDiscoverRequest.
+ */
+export function createRequest(request: "onvifDeviceDiscover"): Request;
+/**
+ * Create a request to get an ONVIF device
+ * @param request - The string which determines the type of request. In this case an OnvifDeviceGetRequest.
+ * @param payload - The data to send in the request. OnvifDeviceGet requests require the endpoint of the ONVIF device.
+ */
+export function createRequest(
+  request: "onvifDeviceGet",
+  payload: UnsecuredEndpoint
+): Request;
+/**
+ * Create a request to set a remote device adapter
+ * @param request - The string which determines the type of request. In this case a RemoteDeviceAdapterSetRequest.
+ * @param payload - The data to send in the request. RemoteDeviceAdapterSet requests require a remote device adapter.
+ */
+export function createRequest(
+  request: "remoteDeviceAdapterSet",
+  payload: RemoteDeviceAdapter
+): Request;
+/**
+ * Create a request to list all remote device adapters on the network
+ * @param request - The string which determines the type of request. In this case a RemoteDeviceAdapterListRequest.
+ */
+export function createRequest(request: "remoteDeviceAdapterList"): Request;
+/**
+ * Create a request to get a remote device adapter
+ * @param request - The string which determines the type of request. In this case a RemoteDeviceAdapterGetRequest.
+ * @param payload - The data to send in the request. RemoteDeviceAdapterGet requests require a remote device adapter name.
+ */
+export function createRequest(
+  request: "remoteDeviceAdapterGet",
+  payload: string
+): Request;
+/**
+ * Create a request to delete a remote device adapter
+ * @param request - The string which determines the type of request. In this case a RemoteDeviceAdapterDeleteRequest.
+ * @param payload - The data to send in the request. RemoteDeviceAdapterDelete requests require a remote device adapter name.
+ */
+export function createRequest(
+  request: "remoteDeviceAdapterDelete",
+  payload: string
+): Request;
+export function createRequest<
+  T extends PipelineTopology | NameObject | LivePipeline | RemoteDeviceAdapter | OnvifDevice
+>(
   request: RequestType,
-  payload?: string | PipelineTopology | LivePipeline
+  payload?: string | PipelineTopology | LivePipeline | UnsecuredEndpoint | RemoteDeviceAdapter
 ): Request<T> | Request<never> {
+  let finalPayload = {};
+  if (typeof payload === "string") {
+    finalPayload = { name: payload };
+  } else if (
+    payload &&
+    (payload as UnsecuredEndpoint)["@type"] === "#Microsoft.VideoAnalyzer.UnsecuredEndpoint"
+  ) {
+    finalPayload = { endpoint: payload };
+  } else {
+    finalPayload = payload ?? {};
+  }
   return {
     methodName: request,
     payload: {
-      ...(typeof payload === "string" ? { name: payload } : payload ?? {}),
+      ...finalPayload,
       "@apiVersion": apiVersion
     }
   };
diff --git a/sdk/videoanalyzer/video-analyzer-edge/swagger/README.md b/sdk/videoanalyzer/video-analyzer-edge/swagger/README.md
index e26d3e9af899..11d9818cdd8a 100644
--- a/sdk/videoanalyzer/video-analyzer-edge/swagger/README.md
+++ b/sdk/videoanalyzer/video-analyzer-edge/swagger/README.md
@@ -12,10 +12,11 @@ generate-metadata: false
 license-header: MICROSOFT_MIT_NO_VERSION
 output-folder: ../
 source-code-folder-path: ./src/generated
-require: https://github.com/Azure/azure-rest-api-specs/blob/55b3e2d075398ec62f9322829494ff6a4323e299/specification/videoanalyzer/data-plane/readme.md
+require: https://github.com/Azure/azure-rest-api-specs/blob/60fcb275cbce38d343f9c35411786e672aba154e/specification/videoanalyzer/data-plane/readme.md
 add-credentials: false
 use-extension:
-  "@autorest/typescript": "6.0.0-dev.20210223.1"
+  "@autorest/typescript": "6.0.0-beta.13"
+use-core-v2: true
 modelerfour:
   naming:
     override:
@@ -38,6 +39,13 @@ directive:
       || definition == 'LivePipelineGetRequest'
       || definition == 'LivePipelineActivateRequest'
       || definition == 'LivePipelineDeactivateRequest'
+      || definition == 'OnvifDeviceDiscoverRequest'
+      || definition == 'OnvifDeviceGetRequest'
+      || definition == 'RemoteDeviceAdapterSetRequest'
+      || definition == 'RemoteDeviceAdapterSetRequestBody'
+      || definition == 'RemoteDeviceAdapterListRequest'
+      || definition == 'RemoteDeviceAdapterGetRequest'
+      || definition == 'RemoteDeviceAdapterDeleteRequest'
       || definition == 'LivePipelineDeleteRequest') {
         delete $[definition];
       }
diff --git a/sdk/videoanalyzer/video-analyzer-edge/test/payloadCreation.spec.ts b/sdk/videoanalyzer/video-analyzer-edge/test/payloadCreation.spec.ts
index 2983c6470edc..48cac0723e8b 100644
--- a/sdk/videoanalyzer/video-analyzer-edge/test/payloadCreation.spec.ts
+++ b/sdk/videoanalyzer/video-analyzer-edge/test/payloadCreation.spec.ts
@@ -7,7 +7,7 @@ import {
   RtspSource,
   UnsecuredEndpoint,
   NodeInput,
-  IotHubMessageSink,
+  VideoSink,
   createRequest
 } from "../src";

@@ -31,11 +31,13 @@ describe("test", () => {
     nodeName: "rtspSource"
   };

-  const msgSink: IotHubMessageSink = {
-    name: "msgSink",
+  const videoSink: VideoSink = {
+    name: "videoSink",
     inputs: [nodeInput],
-    hubOutputName: "${hubSinkOutputName}",
-    "@type": "#Microsoft.VideoAnalyzer.IotHubMessageSink"
+    videoName: "video",
+    localMediaCachePath: "/var/lib/videoanalyzer/tmp/",
+    localMediaCacheMaximumSizeMiB: "1024",
+    "@type": "#Microsoft.VideoAnalyzer.VideoSink"
   };

   const pipelineTopology: PipelineTopology = {
@@ -45,15 +47,14 @@ describe("test", () => {
     parameters: [
       { name: "rtspUserName", type: "String", default: "dummyUsername" },
       { name:
"rtspPassword", type: "SecretString", default: "dummyPassword" }, - { name: "rtspUrl", type: "String" }, - { name: "hubSinkOutputName", type: "String" } + { name: "rtspUrl", type: "String" } ], sources: [rtspSource], - sinks: [msgSink] + sinks: [videoSink] } }; const pipelineTopologySetRequest = createRequest("pipelineTopologySet", pipelineTopology); - assert.strictEqual(pipelineTopologySetRequest.payload["@apiVersion"], "1.0"); + assert.strictEqual(pipelineTopologySetRequest.payload["@apiVersion"], "1.1"); }); }); diff --git a/sdk/videoanalyzer/video-analyzer-edge/tsconfig.json b/sdk/videoanalyzer/video-analyzer-edge/tsconfig.json index a715b8de94d2..77dc2e83089d 100644 --- a/sdk/videoanalyzer/video-analyzer-edge/tsconfig.json +++ b/sdk/videoanalyzer/video-analyzer-edge/tsconfig.json @@ -3,16 +3,10 @@ "compilerOptions": { "outDir": "./dist-esm", "declarationDir": "./types", - "preserveConstEnums": true + "preserveConstEnums": true, + "paths": { + "@azure/video-analyzer-edge": ["./src/index"] + } }, - "exclude": [ - "node_modules", - "types", - "temp", - "browser", - "dist", - "dist-samples", - "dist-esm", - "./samples/**/*.ts" - ] + "include": ["src/**/*.ts", "test/**/*.ts", "samples-dev/**/*.ts"] }