[AVA] Updated package to 1.1, moved to v2 samples flow, moved to core-rest-pipeline (#18425)

* updated package to 1.1

* updating table in readme

* fixing product slugs

* updating slugs and test

* updating slugs

* changing slugs

* fixing formatting
hivyas authored Oct 29, 2021
1 parent cc8130b commit b02acd2
Showing 27 changed files with 1,954 additions and 384 deletions.
4 changes: 2 additions & 2 deletions common/config/rush/pnpm-lock.yaml


2 changes: 1 addition & 1 deletion package.json
@@ -55,7 +55,7 @@
"prettier": "^1.16.4",
"ts-node": "^7.0.1",
"tslib": "1.11.2",
"typescript": "^3.2.2",
"typescript": "^3.9.10",
"yargs": "^11.0.0"
},
"engines": {
6 changes: 4 additions & 2 deletions sdk/videoanalyzer/video-analyzer-edge/CHANGELOG.md
@@ -1,7 +1,9 @@
# Release History
## 1.0.0-beta.3 (2021-11-01)

## 1.0.0-beta.3 (Unreleased)

- Added device discovery and device detail request for ONVIF enabled devices.
- Added Remote Device Adapter configuration for ingesting video in a private network
- Added retention policy to VideoSink

## 1.0.0-beta.2 (2021-05-28)

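The ONVIF device discovery and remote device adapter features listed in the changelog surface as additional direct-method requests on the 1.1 edge module, invoked with the same `createRequest`/`invokeDeviceMethod` pattern shown in the README diff below. A minimal sketch; the `onvifDeviceDiscover` method name, the no-payload form of `createRequest`, and the environment variable names are assumptions to verify against the 1.1 module documentation:

```typescript
import { createRequest } from "@azure/video-analyzer-edge";
import { Client } from "azure-iothub";

async function discoverOnvifDevices(): Promise<void> {
  // Connection details for the IoT hub that hosts the Video Analyzer edge module
  // (assumed to be provided through environment variables).
  const iotHubClient = Client.fromConnectionString(process.env.iothub_connectionstring ?? "");
  const deviceId = process.env.iothub_deviceid ?? "";
  const moduleId = process.env.iothub_moduleid ?? "";

  // Assumed: "onvifDeviceDiscover" is the direct-method name added in module 1.1
  // and needs no payload beyond what createRequest fills in.
  const discoverRequest = createRequest("onvifDeviceDiscover");
  const response = await iotHubClient.invokeDeviceMethod(deviceId, moduleId, {
    methodName: discoverRequest.methodName,
    payload: discoverRequest.payload
  });

  // Inspect the method response for the list of ONVIF-enabled devices found.
  console.log(JSON.stringify(response));
}
```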
104 changes: 55 additions & 49 deletions sdk/videoanalyzer/video-analyzer-edge/README.md
@@ -1,6 +1,6 @@
# Azure Video Analyzer Edge client library for JavaScript

Azure Video Analyzer provides a platform to build intelligent video applications that span the edge and the cloud. The platform offers the capability to capture, record, and analyze live video along with publishing the results, video and video analytics, to Azure services in the cloud or the edge. It is designed to be an extensible platform, enabling you to connect different video analysis edge modules such as Cognitive services containers, custom edge modules built by you with open source machine learning models or custom models trained with your own data. You can then use them to analyze live video without worrying about the complexity of building and running a live video pipeline.
Azure Video Analyzer is an [Azure Applied AI Service][applied-ai-service] that provides a platform for you to build intelligent video applications that can span both edge and cloud infrastructures. The platform offers the capability to capture, record, and analyze live video along with publishing the results, video and video analytics, to Azure services at the edge or in the cloud. It is designed to be an extensible platform, enabling you to connect different video inferencing edge modules such as Cognitive services modules, or custom inferencing modules that have been trained with your own data using either open-source machine learning or [Azure Machine Learning][machine-learning].

Use the client library for Video Analyzer Edge to:

@@ -31,7 +31,9 @@ npm install @azure/video-analyzer-edge

| SDK | Video Analyzer edge module |
| ------------ | -------------------------- |
| 1.0.0-beta.x | 1.0 |
| 1.0.0-beta.3 | 1.1 |
| 1.0.0-beta.2 | 1.0 |
| 1.0.0-beta.1 | 1.0 |

### Creating a pipeline topology and making requests

@@ -53,44 +55,47 @@ To create a pipeline topology you need to define sources and sinks.

```diff
 const rtspSource: RtspSource = {
+  //Create a source for your pipeline topology
   name: "rtspSource",
   endpoint: {
     url: "${rtspUrl}",
     "@type": "#Microsoft.VideoAnalyzer.UnsecuredEndpoint",
     credentials: {
       username: "${rtspUserName}",
       password: "${rtspPassword}",
       "@type": "#Microsoft.VideoAnalyzer.UsernamePasswordCredentials"
     }
   } as UnsecuredEndpoint,
   "@type": "#Microsoft.VideoAnalyzer.RtspSource"
 };
 
 const nodeInput: NodeInput = {
+  //Create an input for your sink
   nodeName: "rtspSource"
 };
 
-const msgSink: IotHubMessageSink = {
-  name: "msgSink",
-  inputs: [nodeInput],
-  hubOutputName: "${hubSinkOutputName}",
-  "@type": "#Microsoft.VideoAnalyzer.IotHubMessageSink"
-};
+const videoSink: VideoSink = {
+  name: "videoSink",
+  inputs: [nodeInput],
+  videoName: "video",
+  localMediaCachePath: "/var/lib/videoanalyzer/tmp/",
+  localMediaCacheMaximumSizeMiB: "1024",
+  "@type": "#Microsoft.VideoAnalyzer.VideoSink"
+};
 
 const pipelineTopology: PipelineTopology = {
   name: "jsTestTopology",
   properties: {
-    description: "Continuous video recording to a Video Analyzer video",
+    description: "description for jsTestTopology",
     parameters: [
-      { name: "rtspUserName", type: "String", default: "dummyUsername" },
-      { name: "rtspPassword", type: "SecretString", default: "dummyPassword" },
-      { name: "rtspUrl", type: "String" },
-      { name: "hubSinkOutputName", type: "String" }
+      { name: "rtspUserName", type: "String", default: "testUsername" },
+      { name: "rtspPassword", type: "SecretString", default: "testPassword" },
+      { name: "rtspUrl", type: "String" },
     ],
     sources: [rtspSource],
-    sinks: [msgSink]
+    sinks: [videoSink]
   }
 };
```

@@ -100,10 +105,10 @@ To create a live pipeline instance, you need to have an existing pipeline topolo

```diff
 const livePipeline: LivePipeline = {
-  name: pipelineTopologyName,
+  name: "jsLivePipelineTest",
   properties: {
-    description: "Continuous video recording to a Video Analyzer video",
-    topologyName: "jsTestTopology",
+    description: "description",
+    topologyName: pipelineTopologyName,
     parameters: [{ name: "rtspUrl", value: "rtsp://sample.com" }]
   }
 };
```
@@ -117,15 +122,16 @@ To invoke a direct method on your device you need to first define the request us
```diff
 import { createRequest } from "@azure/video-analyzer-edge";
 import { Client } from "azure-iothub";
 
-const deviceId = "lva-sample-device";
-const moduleId = "mediaEdge";
-const connectionString = "connectionString";
-const iotHubClient = Client.fromConnectionString(connectionString);
+const deviceId = process.env.iothub_deviceid;
+const moduleId = process.env.iothub_moduleid;
+const connectionString = process.env.iothub_connectionstring;
+const iotHubClient = Client.fromConnectionString(connectionString); //Connect to your IoT Hub
 
 const pipelineTopologySetRequest = createRequest("pipelineTopologySet", pipelineTopology);
 const setPipelineTopResponse = await iotHubClient.invokeDeviceMethod(deviceId, moduleId, {
   methodName: pipelineTopologySetRequest.methodName,
   payload: pipelineTopologySetRequest.payload
 });
```

## Troubleshooting
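Continuing the flow in the README hunks above: once `pipelineTopologySet` succeeds, the live pipeline is registered and activated with the same request/invoke pattern. A rough sketch that reuses the `livePipeline`, `iotHubClient`, `deviceId`, and `moduleId` values from the snippets above; the `livePipelineSet`/`livePipelineActivate` method names and the name-only activate payload are assumptions to check against the module documentation:

```typescript
// Register the live pipeline against the topology that was just set.
const livePipelineSetRequest = createRequest("livePipelineSet", livePipeline);
await iotHubClient.invokeDeviceMethod(deviceId, moduleId, {
  methodName: livePipelineSetRequest.methodName,
  payload: livePipelineSetRequest.payload
});

// Activate it; assumed to take only the live pipeline's name.
const livePipelineActivateRequest = createRequest("livePipelineActivate", livePipeline.name);
await iotHubClient.invokeDeviceMethod(deviceId, moduleId, {
  methodName: livePipelineActivateRequest.methodName,
  payload: livePipelineActivateRequest.payload
});
```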
24 changes: 17 additions & 7 deletions sdk/videoanalyzer/video-analyzer-edge/package.json
@@ -11,14 +11,14 @@
"audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit",
"build:browser": "tsc -p . && cross-env ONLY_BROWSER=true rollup -c 2>&1",
"build:node": "tsc -p . && cross-env ONLY_NODE=true rollup -c 2>&1",
"build:samples": "dev-tool samples prep && cd dist-samples && tsc -p .",
"build:samples": "echo Obsolete.",
"build:test": "tsc -p . && rollup -c 2>&1",
"build": "npm run clean && tsc -p . && rollup -c 2>&1 && api-extractor run --local",
"check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"",
"check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"",
"clean": "rimraf dist dist-* test-dist temp types *.tgz *.log",
"execute:samples": "npm run build:samples && dev-tool samples run dist-samples/javascript dist-samples/typescript/dist/dist-samples/typescript/src/",
"execute:samples": "dev-tool samples run samples-dev",
"extract-api": "tsc -p . && api-extractor run --local",
"format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"",
"format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"",
"integration-test:browser": "karma start --single-run",
"integration-test:node": "nyc mocha -r esm --require source-map-support/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 5000000 --full-trace \"dist-esm/test/{,!(browser)/**/}/*.spec.js\"",
"integration-test": "npm run integration-test:node && npm run integration-test:browser",
@@ -59,11 +59,12 @@
"prettier": "@azure/eslint-plugin-azure-sdk/prettier.json",
"dependencies": {
"@azure/core-auth": "^1.3.0",
"@azure/core-http": "^2.0.0",
"@azure/core-tracing": "1.0.0-preview.13",
"@azure/logger": "^1.0.0",
"events": "^3.0.0",
"tslib": "^2.2.0"
"tslib": "^2.2.0",
"@azure/core-client": "^1.0.0",
"@azure/core-rest-pipeline": "^1.1.0"
},
"devDependencies": {
"@azure/dev-tool": "^1.0.0",
@@ -99,9 +100,18 @@
"typescript": "~4.2.0",
"util": "^0.12.1",
"typedoc": "0.15.2",
"azure-iothub": "^1.13.1"
"azure-iothub": "^1.14.6"
},
"//smokeTestConfiguration": {
"skipFolder": true
},
"//sampleConfiguration": {
"productName": "Azure Video Analyzer",
"productSlugs": [
"azure"
],
"requiredResources": {
"Azure IoT Hub account": "https://docs.microsoft.com/azure/iot-hub/iot-hub-create-through-portal"
}
}
}
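The dependency change above is the "moved to core-rest-pipeline" part of this commit: the package now builds on `@azure/core-client` and `@azure/core-rest-pipeline` instead of `@azure/core-http`. As a rough, package-agnostic illustration of that pipeline model (not code from this repository), a policy is just an object with a `name` and a `sendRequest` function that can be attached to a client's pipeline:

```typescript
import type {
  PipelinePolicy,
  PipelineRequest,
  PipelineResponse,
  SendRequest
} from "@azure/core-rest-pipeline";

// Illustrative policy that stamps a custom header on every outgoing request.
const customHeaderPolicy: PipelinePolicy = {
  name: "customHeaderPolicy",
  async sendRequest(request: PipelineRequest, next: SendRequest): Promise<PipelineResponse> {
    request.headers.set("x-ms-sample-header", "video-analyzer-edge");
    return next(request);
  }
};

// Clients generated on @azure/core-client typically expose a pipeline to attach policies to,
// e.g. client.pipeline.addPolicy(customHeaderPolicy); (the client name here is hypothetical)
```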