diff --git a/index.bs b/index.bs
index 97bcff75..94168ba2 100644
--- a/index.bs
+++ b/index.bs
@@ -1019,11 +1019,9 @@ Both {{MLGraphBuilder}}.{{MLGraphBuilder/build()}} and {{MLGraphBuilder}}.{{MLGr
 The [=new=] {{MLGraphBuilder}} constructor steps are:
 1. If [=this=]'s [=relevant global object=]'s [=associated Document=] is not [=allowed to use=] the [=webnn-feature|webnn=] feature, throw a "{{SecurityError}}" {{DOMException}} and abort these steps.
 1. Let |context| be the first argument.
-1. If |context| is not a valid {{MLContext}}, throw a "{{TypeError}}" and abort these steps.
+1. If the validate MLContext steps given |context| return `false`, throw a "{{TypeError}}" and abort these steps.
 1. Set {{MLGraphBuilder/[[context]]}} to |context|.
 
-Issue(webmachinelearning/webnn#308): Add an algorithm to validate {{MLContext}}.
-
 ### The batchNormalization() method ### {#api-mlgraphbuilder-batchnorm}
 
 Normalize the tensor values of input features across the batch dimension using [[Batch-Normalization]]. For each input feature, the mean and variance values of that feature supplied in this calculation as parameters are previously computed across the batch dimension of the input during the model training phase of this operation.
@@ -1577,7 +1575,7 @@ dictionary MLGruCellOptions {
 
 partial interface MLGraphBuilder {
   MLOperand gruCell(MLOperand input, MLOperand weight, MLOperand recurrentWeight,
-                    MLOperand hiddenState, unsigned long hiddenSize, 
+                    MLOperand hiddenState, unsigned long hiddenSize,
                     optional MLGruCellOptions options = {});
 };
 
@@ -1598,7 +1596,7 @@ partial interface MLGraphBuilder {
     **Returns:** an {{MLOperand}}. The 2-D tensor of shape [batch_size, hidden_size], the cell output hidden state of a single time step of the recurrent network.
-    The behavior of this operation can be generically emulated via other operations as shown below, when the weight layout is the default *"zrn"* layout, and the activation functions of the update/reset gate and new gate are of the operator types *sigmoid* and *tanh* respectively. 
+    The behavior of this operation can be generically emulated via other operations as shown below, when the weight layout is the default *"zrn"* layout, and the activation functions of the update/reset gate and new gate are of the operator types *sigmoid* and *tanh* respectively.
     const one = builder.constant(1);
     const zero = builder.constant(0);
@@ -1924,8 +1922,8 @@ dictionary MLLstmOptions {
 };
 
 partial interface MLGraphBuilder {
-  sequence<MLOperand> lstm(MLOperand input, MLOperand weight, MLOperand recurrentWeight, 
-                           unsigned long steps, unsigned long hiddenSize, 
+  sequence<MLOperand> lstm(MLOperand input, MLOperand weight, MLOperand recurrentWeight,
+                           unsigned long steps, unsigned long hiddenSize,
                            optional MLLstmOptions options = {});
 };
 
@@ -2041,7 +2039,7 @@ dictionary MLLstmCellOptions {
 
 partial interface MLGraphBuilder {
   sequence<MLOperand> lstmCell(MLOperand input, MLOperand weight, MLOperand recurrentWeight,
-                               MLOperand hiddenState, MLOperand cellState, unsigned long hiddenSize, 
+                               MLOperand hiddenState, MLOperand cellState, unsigned long hiddenSize,
                                optional MLLstmCellOptions options = {});
 };
 
@@ -2063,7 +2061,7 @@ partial interface MLGraphBuilder {
     **Returns:** a sequence of {{MLOperand}}. The first element of the sequence is the output hidden state of the current time step of the recurrent network. The following element is the output cell state. Both elements are 2-D tensors of shape [batch_size, hidden_size].
 
     
-    The behavior of this operation can be generically emulated via other operations as shown below, when the weight layout is the default *"iofg"* layout, and the activation functions of the input/forget/output gate and the cell gate/the cell state's filter for the output hidden state are of the operator types *sigmoid* and *tanh* respectively. 
+    The behavior of this operation can be generically emulated via other operations as shown below, when the weight layout is the default *"iofg"* layout, and the activation functions of the input/forget/output gate and the cell gate/the cell state's filter for the output hidden state are of the operator types *sigmoid* and *tanh* respectively.
     const zero = builder.constant(0);
 
@@ -2309,8 +2307,8 @@ partial interface MLGraphBuilder {
             is interpreted according to the value of *options.layout*.
         - *options*: an optional {{MLPool2dOptions}}. The optional parameters of the operation.
             - *windowDimensions*: a sequence of {{unsigned long}} of length 2. The dimensions of the sliding window,
-                [window_height, window_width]. If not present, the window dimensions are assumed to be the height  
-                and width dimensions of the input shape. 
+                [window_height, window_width]. If not present, the window dimensions are assumed to be the height
+                and width dimensions of the input shape.
             - *padding*: a sequence of {{unsigned long}} of length 4. The additional rows and columns added to the beginning and ending of each spatial dimension of *input*, [beginning_height, ending_height, beginning_width, ending_width]. If not present, the values are assumed to be [0,0,0,0].
             - *strides*: a sequence of {{unsigned long}} of length 2. The stride of the
                 sliding window for each spatial dimension of *input*,
@@ -2545,7 +2543,7 @@ partial interface MLGraphBuilder {
     **Arguments:**
         - *x*: an {{MLOperand}}. The input 2-D tensor.
 
-    **Returns:** 
+    **Returns:**
         - an {{MLOperand}}. The output 2-D tensor that contains the softmax results, of the same shape as the input tensor.
         - an {{MLActivation}}. The activation function representing the softmax operation.