Commit 5c0b540: Lint

axinging committed Jan 31, 2024
1 parent 38de564

Showing 4 changed files with 7 additions and 18 deletions.
@@ -409,7 +409,7 @@ const matMulReadWriteFnSource =
   ${
       hasBias ?
           `value = value + ${isChannelsLast ? 'bias[colIn]' : `${typeSnippet(component, dataType)}(bias[row])`};` :
-          ''}
+          '' }
   ${applyActivation}
   ${outputVariable.setByIndices('vec3<u32>(coords)', 'value')}
 }
js/web/lib/wasm/jsep/webgpu/ops/conv-grouped.ts (8 changes: 1 addition & 7 deletions)

@@ -34,13 +34,7 @@ export const createGroupedConvProgramInfo =
   {type: DataType.uint32, data: [attributes.pads[0], attributes.pads[1]]},
   {type: DataType.uint32, data: outputChannelsPerGroup}
 ];
-<<<<<<< HEAD
-    if (attributes.activation === 'Clip') {
-      programUniforms.push(
-          {type: DataType.float, data: attributes.clipMax!}, {type: DataType.float, data: attributes.clipMin!});
-    }
-== == === appendActivationUniformsData(attributes, programUniforms);
->>>>>>> main
+    appendActivationUniformsData(attributes, programUniforms);
     programUniforms.push(
         ...createTensorShapeVariables(xShape), ...createTensorShapeVariables(wShape),
         ...createTensorShapeVariables(outputShape));
js/web/lib/wasm/jsep/webgpu/ops/fuse-utils.ts (7 changes: 5 additions & 2 deletions)

@@ -1,6 +1,7 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Licensed under the MIT License.
 
+import {DataType} from '../../../wasm-common';
 import {MAX_CLIP, MIN_CLIP} from '../../util';
 import {ProgramUniform} from '../types';
 
@@ -36,9 +37,11 @@ export const getActivationSnippet = (attributes: InternalActivationAttributes, v
 export const appendActivationUniformsData =
     (attributes: InternalActivationAttributes, programUniform: ProgramUniform[]) => {
       if (attributes.activation === 'Clip') {
-        programUniform.push({type: 'float32', data: attributes.clipMax!}, {type: 'float32', data: attributes.clipMin!});
+        programUniform.push(
+            {type: DataType.float, data: attributes.clipMax!}, {type: DataType.float, data: attributes.clipMin!});
       } else if (attributes.activation === 'HardSigmoid') {
-        programUniform.push({type: 'float32', data: attributes.alpha!}, {type: 'float32', data: attributes.beta!});
+        programUniform.push(
+            {type: DataType.float, data: attributes.alpha!}, {type: DataType.float, data: attributes.beta!});
       }
     };
 
js/web/lib/wasm/jsep/webgpu/ops/matmul.ts (8 changes: 0 additions & 8 deletions)

@@ -33,15 +33,7 @@ export const createNaiveMatmulProgramInfo =
   {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},
   {type: DataType.uint32, data: K}
 ];
-<<<<<<< HEAD
-    if (activationAttributes.activation === 'Clip') {
-      programUniforms.push(
-          {type: DataType.float, data: activationAttributes.clipMax!},
-          {type: DataType.float, data: activationAttributes.clipMin!});
-    }
-=======
     appendActivationUniformsData(activationAttributes, programUniforms);
->>>>>>> main
     programUniforms.push(
         ...createTensorShapeVariables(outerDims), ...createTensorShapeVariables(aShape),
         ...createTensorShapeVariables(bShape));