Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[js/common] fix typedoc warnings #19933

Merged
merged 5 commits into from
Mar 16, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions js/common/lib/backend.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ export interface TrainingSessionHandler extends SessionHandler {
options: InferenceSession.RunOptions): Promise<SessionHandler.ReturnType>;

getParametersSize(trainableOnly: boolean): Promise<number>;
loadParametersBuffer(array: Uint8Array, trainableOnly: boolean): Promise<void>;
loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise<void>;
getContiguousParameters(trainableOnly: boolean): Promise<OnnxValue>;
}

Expand All @@ -77,8 +77,8 @@ export interface Backend {
Promise<InferenceSessionHandler>;

createTrainingSessionHandler?
(checkpointStateUriOrBuffer: TrainingSession.URIorBuffer, trainModelUriOrBuffer: TrainingSession.URIorBuffer,
evalModelUriOrBuffer: TrainingSession.URIorBuffer, optimizerModelUriOrBuffer: TrainingSession.URIorBuffer,
(checkpointStateUriOrBuffer: TrainingSession.UriOrBuffer, trainModelUriOrBuffer: TrainingSession.UriOrBuffer,
evalModelUriOrBuffer: TrainingSession.UriOrBuffer, optimizerModelUriOrBuffer: TrainingSession.UriOrBuffer,
options: InferenceSession.SessionOptions): Promise<TrainingSessionHandler>;
}

Expand Down
4 changes: 2 additions & 2 deletions js/common/lib/env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ export declare namespace Env {
* When used with TypeScript, the type of this property is `GPUAdapter` defined in "@webgpu/types".
* Use `const adapter = env.webgpu.adapter as GPUAdapter;` in TypeScript to access this property with correct type.
*
* see comments on {@link GpuBufferType}
* see comments on {@link Tensor.GpuBufferType}
*/
readonly adapter: unknown;
/**
Expand All @@ -184,7 +184,7 @@ export declare namespace Env {
* When used with TypeScript, the type of this property is `GPUDevice` defined in "@webgpu/types".
* Use `const device = env.webgpu.device as GPUDevice;` in TypeScript to access this property with correct type.
*
*
* see comments on {@link Tensor.GpuBufferType} for more details about why the types defined in "@webgpu/types" are not used.
*/
readonly device: unknown;
/**
Expand Down
3 changes: 3 additions & 0 deletions js/common/lib/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,9 @@ export * from './backend.js';
export * from './env.js';
export * from './inference-session.js';
export * from './tensor.js';
export * from './tensor-conversion.js';
export * from './tensor-factory.js';
export * from './trace.js';
export * from './onnx-model.js';
export * from './onnx-value.js';
export * from './training-session.js';
43 changes: 35 additions & 8 deletions js/common/lib/inference-session.ts
Original file line number Diff line number Diff line change
Expand Up @@ -186,22 +186,22 @@ export declare namespace InferenceSession {
// #region execution providers

// Currently, we have the following backends to support execution providers:
// Backend Node.js binding: supports 'cpu' and 'cuda'.
// Backend Node.js binding: supports 'cpu', 'dml' (win32), 'coreml' (macOS) and 'cuda' (linux).
// Backend WebAssembly: supports 'cpu', 'wasm', 'webgpu' and 'webnn'.
// Backend ONNX.js: supports 'webgl'.
// Backend React Native: supports 'cpu', 'xnnpack', 'coreml' (iOS), 'nnapi' (Android).
interface ExecutionProviderOptionMap {
coreml: CoreMLExecutionProviderOption;
cpu: CpuExecutionProviderOption;
coreml: CoreMlExecutionProviderOption;
cuda: CudaExecutionProviderOption;
dml: DmlExecutionProviderOption;
nnapi: NnapiExecutionProviderOption;
tensorrt: TensorRtExecutionProviderOption;
wasm: WebAssemblyExecutionProviderOption;
webgl: WebGLExecutionProviderOption;
xnnpack: XnnpackExecutionProviderOption;
webgpu: WebGpuExecutionProviderOption;
webnn: WebNNExecutionProviderOption;
nnapi: NnapiExecutionProviderOption;
xnnpack: XnnpackExecutionProviderOption;
}

type ExecutionProviderName = keyof ExecutionProviderOptionMap;
Expand All @@ -219,10 +219,6 @@ export declare namespace InferenceSession {
readonly name: 'cuda';
deviceId?: number;
}
export interface CoreMlExecutionProviderOption extends ExecutionProviderOption {
readonly name: 'coreml';
coreMlFlags?: number;
}
export interface DmlExecutionProviderOption extends ExecutionProviderOption {
readonly name: 'dml';
deviceId?: number;
Expand Down Expand Up @@ -253,8 +249,39 @@ export declare namespace InferenceSession {
}
export interface CoreMLExecutionProviderOption extends ExecutionProviderOption {
readonly name: 'coreml';
/**
* The bit flags for CoreML execution provider.
*
* ```
* COREML_FLAG_USE_CPU_ONLY = 0x001
* COREML_FLAG_ENABLE_ON_SUBGRAPH = 0x002
* COREML_FLAG_ONLY_ENABLE_DEVICE_WITH_ANE = 0x004
* COREML_FLAG_ONLY_ALLOW_STATIC_INPUT_SHAPES = 0x008
* COREML_FLAG_CREATE_MLPROGRAM = 0x010
* ```
*
* See include/onnxruntime/core/providers/coreml/coreml_provider_factory.h for more details.
*
* This flag is available only in ONNXRuntime (Node.js binding).
*/
coreMlFlags?: number;
/**
* Specify whether to use CPU only in CoreML EP.
*
* This setting is available only in ONNXRuntime (react-native).
*/
useCPUOnly?: boolean;
/**
* Specify whether to enable CoreML EP on subgraph.
*
* This setting is available only in ONNXRuntime (react-native).
*/
enableOnSubgraph?: boolean;
/**
* Specify whether to only enable CoreML EP for Apple devices with ANE (Apple Neural Engine).
*
* This setting is available only in ONNXRuntime (react-native).
*/
onlyEnableDeviceWithANE?: boolean;
}
export interface NnapiExecutionProviderOption extends ExecutionProviderOption {
Expand Down
2 changes: 1 addition & 1 deletion js/common/lib/onnx-value.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

import {Tensor} from './tensor.js';

type NonTensorType = never;
export type NonTensorType = never;

/**
* Type OnnxValue Represents both tensors and non-tensors value for model's inputs/outputs.
Expand Down
2 changes: 1 addition & 1 deletion js/common/lib/tensor-factory.ts
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,7 @@ export interface TensorFactory {
/**
* create a tensor from an ImageBitmap object
*
* @param bitMap - the ImageBitmap object to create tensor from
* @param bitmap - the ImageBitmap object to create tensor from
* @param options - An optional object representing options for creating tensor from URL.
*
* The following default settings will be applied:
Expand Down
4 changes: 2 additions & 2 deletions js/common/lib/tensor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ export interface Tensor extends TypedTensorBase<Tensor.Type>, TypedTensorUtils<T
/**
* type TensorConstructor defines the constructors of 'Tensor' to create CPU tensor instances.
*/
export interface TensorConstructor {
export interface TensorConstructor extends TensorFactory {
// #region CPU tensor - specify element type
/**
* Construct a new string tensor object from the given type, data and dims.
Expand Down Expand Up @@ -326,4 +326,4 @@ export interface TensorConstructor {
}

// eslint-disable-next-line @typescript-eslint/naming-convention
export const Tensor = TensorImpl as (TensorConstructor & TensorFactory);
export const Tensor = TensorImpl as TensorConstructor;
9 changes: 9 additions & 0 deletions js/common/lib/trace.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@

import {env} from './env-impl.js';

/**
* @ignore
*/
export const TRACE = (deviceType: string, label: string) => {
if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {
return;
Expand All @@ -29,13 +32,19 @@ const TRACE_FUNC = (msg: string, extraMsg?: string) => {
}
};

/**
* @ignore
*/
export const TRACE_FUNC_BEGIN = (extraMsg?: string) => {
if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {
return;
}
TRACE_FUNC('BEGIN', extraMsg);
};

/**
* @ignore
*/
export const TRACE_FUNC_END = (extraMsg?: string) => {
if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {
return;
Expand Down
16 changes: 8 additions & 8 deletions js/common/lib/training-session.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ export declare namespace TrainingSession {
/**
* Either URI file path (string) or Uint8Array containing model or checkpoint information.
*/
type URIorBuffer = string|Uint8Array;
type UriOrBuffer = string|Uint8Array;
}

/**
Expand Down Expand Up @@ -98,13 +98,13 @@ export interface TrainingSession {
getParametersSize(trainableOnly: boolean): Promise<number>;

/**
* Copies parameter values from the given array to the training state. Currently, only supporting models with
* Copies parameter values from the given buffer to the training state. Currently, only supporting models with
* parameters of type Float32.
*
* @param buffer - Float32 buffer containing parameters converted to a Uint8Array.
* @param buffer - A Uint8Array representation of Float32 parameters.
* @param trainableOnly - True if trainable parameters only to be modified, false otherwise. Default value is true.
*/
loadParametersBuffer(array: Uint8Array, trainableOnly: boolean): Promise<void>;
loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise<void>;

/**
* Copies the model parameters to a contiguous buffer. Usually used in the context of Federated Learning.
Expand Down Expand Up @@ -157,19 +157,19 @@ export interface TrainingSessionCreateOptions {
/**
* URI or buffer for a .ckpt file that contains the checkpoint for the training model.
*/
checkpointState: TrainingSession.URIorBuffer;
checkpointState: TrainingSession.UriOrBuffer;
/**
* URI or buffer for the .onnx training file.
*/
trainModel: TrainingSession.URIorBuffer;
trainModel: TrainingSession.UriOrBuffer;
/**
* Optional. URI or buffer for the .onnx optimizer model file.
*/
optimizerModel?: TrainingSession.URIorBuffer;
optimizerModel?: TrainingSession.UriOrBuffer;
/**
* Optional. URI or buffer for the .onnx eval model file.
*/
evalModel?: TrainingSession.URIorBuffer;
evalModel?: TrainingSession.UriOrBuffer;
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,10 @@ steps:
node -e "a=require('child_process').execSync('git diff --name-only').toString();if(a)throw new Error('Following source files are not formatted: (did you run \"npm run format\"?)\n'+a)"
workingDirectory: '$(Build.SourcesDirectory)/js'
displayName: 'Check unformatted files'
- script: |
npx typedoc --emit none --treatWarningsAsErrors
workingDirectory: '$(Build.SourcesDirectory)/js/common'
displayName: 'TypeDoc Validation'
- script: |
npm run build:doc
workingDirectory: '$(Build.SourcesDirectory)/js/web'
Expand Down
Loading