Skip to content

Commit

Permalink
fix lintrunner error
Browse files — browse the repository at this point in the history
  • Loading branch information
ajindal1 committed Sep 19, 2023
1 parent 3607251 commit 1209cdf
Showing 1 changed file with 4 additions and 8 deletions.
12 changes: 4 additions & 8 deletions onnxruntime/test/optimizer/graph_transform_test_layernorm.cc
Original file line number Diff line number Diff line change
Expand Up @@ -419,12 +419,10 @@ TEST_F(GraphTransformationTests, SimplifiedLayerNormFusionTest) {
for (const Node& node : graph.Nodes()) {
if (node.OpType() == "SimplifiedLayerNormalization") {
// LayerNormalization should have two inputs.
EXPECT_EQ(node.InputDefs().size(), 2u) <<
"LayerNormalization number of inputs does not equal to 2. Got:" << node.InputDefs().size();
EXPECT_EQ(node.InputDefs().size(), 2u) << "LayerNormalization number of inputs does not equal to 2. Got:" << node.InputDefs().size();
// LayerNormalization input "scale" and "bias" should have the same dimension.
const TensorShapeProto* scale_shape = node.InputDefs()[1]->Shape();
EXPECT_EQ(scale_shape->dim_size(), 1) <<
"LayerNormalization scale should be 1D. Got: " << scale_shape->dim_size();
EXPECT_EQ(scale_shape->dim_size(), 1) << "LayerNormalization scale should be 1D. Got: " << scale_shape->dim_size();
} else {
EXPECT_TRUE(false) << "Unexpected node " << node.Name();
}
Expand Down Expand Up @@ -455,12 +453,10 @@ TEST_F(GraphTransformationTests, LayerNormScaleBiasTest) {
for (const Node& node : graph.Nodes()) {
if (node.OpType() == "LayerNormalization") {
// LayerNormalization should have three inputs.
EXPECT_EQ(node.InputDefs().size(), 3u) <<
"LayerNormalization number of inputs does not equal to 3. Got:" << node.InputDefs().size();
EXPECT_EQ(node.InputDefs().size(), 3u) << "LayerNormalization number of inputs does not equal to 3. Got:" << node.InputDefs().size();

Check warning on line 456 in onnxruntime/test/optimizer/graph_transform_test_layernorm.cc

View workflow job for this annotation

GitHub Actions / cpplint

[cpplint] onnxruntime/test/optimizer/graph_transform_test_layernorm.cc#L456

Lines should be <= 120 characters long [whitespace/line_length] [2]
Raw output
onnxruntime/test/optimizer/graph_transform_test_layernorm.cc:456:  Lines should be <= 120 characters long  [whitespace/line_length] [2]
// LayerNormalization input "scale" and "bias" should have the same dimension.
const TensorShapeProto* scale_shape = node.InputDefs()[1]->Shape();
EXPECT_EQ(scale_shape->dim_size(), 1) <<
"LayerNormalization scale should be 1D. Got: " << scale_shape->dim_size();
EXPECT_EQ(scale_shape->dim_size(), 1) << "LayerNormalization scale should be 1D. Got: " << scale_shape->dim_size();

Check warning on line 459 in onnxruntime/test/optimizer/graph_transform_test_layernorm.cc

View workflow job for this annotation

GitHub Actions / cpplint

[cpplint] onnxruntime/test/optimizer/graph_transform_test_layernorm.cc#L459

Lines should be <= 120 characters long [whitespace/line_length] [2]
Raw output
onnxruntime/test/optimizer/graph_transform_test_layernorm.cc:459:  Lines should be <= 120 characters long  [whitespace/line_length] [2]
}
}
}
Expand Down

0 comments on commit 1209cdf

Please sign in to comment.