Skip to content

Commit

Permalink
webnn: Remove sync methods
Browse files Browse the repository at this point in the history
See webmachinelearning/webnn#548

Deletes all sync methods and simplifies the names of all "Async"
methods - e.g. BuildAsync() -> Build()

Bug: 40283536, 41481333
Change-Id: I3b38d987cb4641ea41ab9a974c46dcba16d5c108
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/5292884
Commit-Queue: Austin Sullivan <[email protected]>
Reviewed-by: Reilly Grant <[email protected]>
Reviewed-by: ningxin hu <[email protected]>
Reviewed-by: Alex Gough <[email protected]>
Cr-Commit-Position: refs/heads/main@{#1261223}
  • Loading branch information
a-sully authored and chromium-wpt-export-bot committed Feb 15, 2024
1 parent de8ec2f commit 542892d
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 82 deletions.
37 changes: 13 additions & 24 deletions webnn/idlharness.https.any.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,29 +28,18 @@ idl_test(
MLGraph: ['graph']
});

for (const executionType of ExecutionArray) {
const isSync = executionType === 'sync';
if (self.GLOBAL.isWindow() && isSync) {
continue;
}

if (isSync) {
self.context = navigator.ml.createContextSync();
} else {
self.context = await navigator.ml.createContext();
}

self.builder = new MLGraphBuilder(self.context);
self.input = builder.input('input', {dataType: 'float32', dimensions: [1, 1, 5, 5]});
self.filter = builder.constant({dataType: 'float32', dimensions: [1, 1, 3, 3]}, new Float32Array(9).fill(1));
self.relu = builder.relu();
self.output = builder.conv2d(input, filter, {activation: relu, inputLayout: "nchw"});

if (isSync) {
self.graph = builder.buildSync({output});
} else {
self.graph = await builder.build({output});
}
}
self.context = await navigator.ml.createContext();

self.builder = new MLGraphBuilder(self.context);
self.input =
builder.input('input', {dataType: 'float32', dimensions: [1, 1, 5, 5]});
self.filter = builder.constant(
{dataType: 'float32', dimensions: [1, 1, 3, 3]},
new Float32Array(9).fill(1));
self.relu = builder.relu();
self.output =
builder.conv2d(input, filter, {activation: relu, inputLayout: "nchw"});

self.graph = await builder.build({output});
}
);
73 changes: 15 additions & 58 deletions webnn/resources/utils.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
'use strict';

const ExecutionArray = ['sync', 'async'];

// https://webmachinelearning.github.io/webnn/#enumdef-mloperanddatatype
const TypedArrayDict = {
// workaround use Uint16 for Float16
Expand Down Expand Up @@ -793,25 +791,7 @@ const buildGraph = (operationName, builder, resources, buildFunc) => {
};

/**
* Build a graph, synchronously compile graph and execute, then check computed results.
* @param {String} operationName - An operation name
* @param {MLContext} context - A ML context
* @param {MLGraphBuilder} builder - A ML graph builder
* @param {Object} resources - Resources used for building a graph
* @param {Function} buildFunc - A build function for an operation
*/
const runSync = (operationName, context, builder, resources, buildFunc) => {
// build a graph
const [namedOutputOperands, inputs, outputs] = buildGraph(operationName, builder, resources, buildFunc);
// synchronously compile the graph up to the output operand
const graph = builder.buildSync(namedOutputOperands);
// synchronously execute the compiled graph.
context.computeSync(graph, inputs, outputs);
checkResults(operationName, namedOutputOperands, outputs, resources);
};

/**
* Build a graph, asynchronously compile graph and execute, then check computed results.
* Build a graph, compile graph and execute, then check computed results.
* @param {String} operationName - An operation name
* @param {MLContext} context - A ML context
* @param {MLGraphBuilder} builder - A ML graph builder
Expand All @@ -821,9 +801,9 @@ const runSync = (operationName, context, builder, resources, buildFunc) => {
// Builds the graph for `operationName` via `buildFunc`, compiles and
// executes it on `context`, then checks the computed outputs against the
// expected values carried in `resources`.
// NOTE(review): `buildGraph` / `checkResults` are defined elsewhere in
// utils.js; their contracts are assumed, not visible here.
const run = async (operationName, context, builder, resources, buildFunc) => {
// build a graph; returns the named output operands plus the input/output
// buffer maps used for compute
const [namedOutputOperands, inputs, outputs] = buildGraph(operationName, builder, resources, buildFunc);
// compile the graph up to the output operand
const graph = await builder.build(namedOutputOperands);
// execute the compiled graph
const result = await context.compute(graph, inputs, outputs);
// compare the computed outputs (result.outputs) with the expected data
checkResults(operationName, namedOutputOperands, result.outputs, resources);
};
Expand All @@ -842,41 +822,18 @@ const testWebNNOperation = (operationName, buildFunc, deviceType = 'cpu') => {
operationNameArray = operationName;
}

ExecutionArray.forEach(executionType => {
const isSync = executionType === 'sync';
if (self.GLOBAL.isWindow() && isSync) {
return;
}
let context;
let builder;
if (isSync) {
// test sync
operationNameArray.forEach((subOperationName) => {
const tests = loadTests(subOperationName);
setup(() => {
context = navigator.ml.createContextSync({deviceType});
builder = new MLGraphBuilder(context);
});
for (const subTest of tests) {
test(() => {
runSync(subOperationName, context, builder, subTest, buildFunc);
}, `${subTest.name} / ${executionType}`);
}
});
} else {
// test async
operationNameArray.forEach((subOperationName) => {
const tests = loadTests(subOperationName);
promise_setup(async () => {
context = await navigator.ml.createContext({deviceType});
builder = new MLGraphBuilder(context);
});
for (const subTest of tests) {
promise_test(async () => {
await run(subOperationName, context, builder, subTest, buildFunc);
}, `${subTest.name} / ${executionType}`);
}
});
let context;
let builder;
operationNameArray.forEach((subOperationName) => {
const tests = loadTests(subOperationName);
promise_setup(async () => {
context = await navigator.ml.createContext({deviceType});
builder = new MLGraphBuilder(context);
});
for (const subTest of tests) {
promise_test(async () => {
await run(subOperationName, context, builder, subTest, buildFunc);
}, `${subTest.name}`);
}
});
};
Expand Down

0 comments on commit 542892d

Please sign in to comment.