From 70cb1716555e44105bafb14fdfe910904da03068 Mon Sep 17 00:00:00 2001 From: Dongjoon Hyun Date: Sun, 27 Aug 2023 03:11:43 -0700 Subject: [PATCH] ORC-1492: Fix checkstyle violations for tests in `mapreduce/tools/bench` modules ### What changes were proposed in this pull request? This PR aims to fix checkstyle in `mapreduce/tools/bench` modules and removes the following. https://github.com/apache/orc/blob/e4b833809b6bd6eee316232e0e96d24bd7d4f6ee/java/checkstyle-suppressions.xml#L46-L52 Please note that we suppress `Indentation` rule on the following three files additionally. ```xml ``` ### Why are the changes needed? To apply Checkstyle on test code. ### How was this patch tested? Pass the CIs. Since we removed the global suppression rules on `test` code, CI should verify the result. Closes #1601 from dongjoon-hyun/ORC-1492. Authored-by: Dongjoon Hyun Signed-off-by: Dongjoon Hyun --- .../apache/orc/bench/core/filter/TestFilter.java | 4 ++-- java/checkstyle-suppressions.xml | 11 +++-------- .../org/apache/orc/mapred/TestMapRedFiltering.java | 6 +++--- .../org/apache/orc/mapreduce/FilterTestUtil.java | 12 ++++++------ .../orc/mapreduce/TestMapReduceFiltering.java | 13 +++++-------- .../src/test/org/apache/orc/impl/TestRLEv2.java | 6 +++--- .../src/test/org/apache/orc/tools/TestFileDump.java | 6 +++--- 7 files changed, 25 insertions(+), 33 deletions(-) diff --git a/java/bench/core/src/test/org/apache/orc/bench/core/filter/TestFilter.java b/java/bench/core/src/test/org/apache/orc/bench/core/filter/TestFilter.java index d6a3814fc91..a60556b1628 100644 --- a/java/bench/core/src/test/org/apache/orc/bench/core/filter/TestFilter.java +++ b/java/bench/core/src/test/org/apache/orc/bench/core/filter/TestFilter.java @@ -104,8 +104,8 @@ private Filter(String complexity, String filterType, boolean normalize) break; case "vector": Reader.Options options = new Reader.Options(conf) - .searchArgument(sArg, new String[0]) - .allowSARGToFilter(true); + 
.searchArgument(sArg, new String[0]) + .allowSARGToFilter(true); filter = FilterFactory.createBatchFilter(options, FilterBenchUtil.schema, false, diff --git a/java/checkstyle-suppressions.xml b/java/checkstyle-suppressions.xml index 32e04a58872..16047f69cda 100644 --- a/java/checkstyle-suppressions.xml +++ b/java/checkstyle-suppressions.xml @@ -38,16 +38,11 @@ + + + - - - - - - - - diff --git a/java/mapreduce/src/test/org/apache/orc/mapred/TestMapRedFiltering.java b/java/mapreduce/src/test/org/apache/orc/mapred/TestMapRedFiltering.java index 30462bf88d4..947595c70b7 100644 --- a/java/mapreduce/src/test/org/apache/orc/mapred/TestMapRedFiltering.java +++ b/java/mapreduce/src/test/org/apache/orc/mapred/TestMapRedFiltering.java @@ -71,7 +71,7 @@ public void readWithSArg() throws IOException, InterruptedException { new String[0]); FilterTestUtil.readStart(); RecordReader r = new OrcInputFormat() - .getRecordReader(split, new JobConf(conf), null); + .getRecordReader(split, new JobConf(conf), null); long rowCount = validateFilteredRecordReader(r); double p = FilterTestUtil.readPercentage(FilterTestUtil.readEnd(), fs.getFileStatus(filePath).getLen()); @@ -93,7 +93,7 @@ public void readWithSArgAsFilter() throws IOException { new String[0]); FilterTestUtil.readStart(); RecordReader r = new OrcInputFormat() - .getRecordReader(split, new JobConf(conf), null); + .getRecordReader(split, new JobConf(conf), null); long rowCount = validateFilteredRecordReader(r); double p = FilterTestUtil.readPercentage(FilterTestUtil.readEnd(), fs.getFileStatus(filePath).getLen()); @@ -139,7 +139,7 @@ private void readSingleRowWfilter(long idx) throws IOException, InterruptedExcep new String[0]); FilterTestUtil.readStart(); RecordReader r = new OrcInputFormat() - .getRecordReader(split, new JobConf(conf), null); + .getRecordReader(split, new JobConf(conf), null); OrcStruct row = new OrcStruct(FilterTestUtil.schema); long rowCount = 0; while (r.next(NullWritable.get(), row)) { diff --git 
a/java/mapreduce/src/test/org/apache/orc/mapreduce/FilterTestUtil.java b/java/mapreduce/src/test/org/apache/orc/mapreduce/FilterTestUtil.java index 262b47e7c14..6e609a9ab73 100644 --- a/java/mapreduce/src/test/org/apache/orc/mapreduce/FilterTestUtil.java +++ b/java/mapreduce/src/test/org/apache/orc/mapreduce/FilterTestUtil.java @@ -41,13 +41,13 @@ import static org.junit.jupiter.api.Assertions.assertEquals; public class FilterTestUtil { - private final static Logger LOG = LoggerFactory.getLogger(FilterTestUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(FilterTestUtil.class); public static final TypeDescription schema = TypeDescription.createStruct() - .addField("f1", TypeDescription.createLong()) - .addField("f2", TypeDescription.createDecimal().withPrecision(20).withScale(6)) - .addField("f3", TypeDescription.createLong()) - .addField("f4", TypeDescription.createString()) - .addField("ridx", TypeDescription.createLong()); + .addField("f1", TypeDescription.createLong()) + .addField("f2", TypeDescription.createDecimal().withPrecision(20).withScale(6)) + .addField("f3", TypeDescription.createLong()) + .addField("f4", TypeDescription.createString()) + .addField("ridx", TypeDescription.createLong()); public static final long RowCount = 4000000L; private static final int scale = 3; diff --git a/java/mapreduce/src/test/org/apache/orc/mapreduce/TestMapReduceFiltering.java b/java/mapreduce/src/test/org/apache/orc/mapreduce/TestMapReduceFiltering.java index f8bbf1fb688..0eb9173738f 100644 --- a/java/mapreduce/src/test/org/apache/orc/mapreduce/TestMapReduceFiltering.java +++ b/java/mapreduce/src/test/org/apache/orc/mapreduce/TestMapReduceFiltering.java @@ -77,8 +77,7 @@ public void readWithSArg() throws IOException, InterruptedException { TaskAttemptContext attemptContext = new TaskAttemptContextImpl(conf, id); FilterTestUtil.readStart(); org.apache.hadoop.mapreduce.RecordReader r = - new OrcInputFormat().createRecordReader(split, - attemptContext); 
+ new OrcInputFormat().createRecordReader(split, attemptContext); long rowCount = validateFilteredRecordReader(r); double p = FilterTestUtil.readPercentage(FilterTestUtil.readEnd(), fs.getFileStatus(filePath).getLen()); @@ -102,8 +101,7 @@ public void readWithSArgAsFilter() throws IOException, InterruptedException { TaskAttemptContext attemptContext = new TaskAttemptContextImpl(conf, id); FilterTestUtil.readStart(); org.apache.hadoop.mapreduce.RecordReader r = - new OrcInputFormat().createRecordReader(split, - attemptContext); + new OrcInputFormat().createRecordReader(split, attemptContext); long rowCount = validateFilteredRecordReader(r); double p = FilterTestUtil.readPercentage(FilterTestUtil.readEnd(), fs.getFileStatus(filePath).getLen()); @@ -140,8 +138,7 @@ private void testSingleRowWfilter(long idx) throws IOException, InterruptedExcep TaskAttemptContext attemptContext = new TaskAttemptContextImpl(conf, id); FilterTestUtil.readStart(); org.apache.hadoop.mapreduce.RecordReader r = - new OrcInputFormat().createRecordReader(split, - attemptContext); + new OrcInputFormat().createRecordReader(split, attemptContext); long rowCount = 0; while (r.nextKeyValue()) { validateLimitedRow(r.getCurrentValue(), idx); @@ -151,8 +148,8 @@ private void testSingleRowWfilter(long idx) throws IOException, InterruptedExcep assertEquals(1, rowCount); } - private static long validateFilteredRecordReader(org.apache.hadoop.mapreduce.RecordReader rr) + private static long validateFilteredRecordReader( + org.apache.hadoop.mapreduce.RecordReader rr) throws IOException, InterruptedException { long rowCount = 0; while (rr.nextKeyValue()) { diff --git a/java/tools/src/test/org/apache/orc/impl/TestRLEv2.java b/java/tools/src/test/org/apache/orc/impl/TestRLEv2.java index 196b1a66bfd..2c9b7e55551 100644 --- a/java/tools/src/test/org/apache/orc/impl/TestRLEv2.java +++ b/java/tools/src/test/org/apache/orc/impl/TestRLEv2.java @@ -327,9 +327,9 @@ public void testBaseValueLimit() throws Exception { 
VectorizedRowBatch batch = schema.createRowBatch(); //the minimum value is beyond RunLengthIntegerWriterV2.BASE_VALUE_LIMIT - long[] input = {-9007199254740992l,-8725724278030337l,-1125762467889153l, -1l,-9007199254740992l, - -9007199254740992l, -497l,127l,-1l,-72057594037927936l,-4194304l,-9007199254740992l,-4503599593816065l, - -4194304l,-8936830510563329l,-9007199254740992l, -1l, -70334384439312l,-4063233l, -6755399441973249l}; + long[] input = {-9007199254740992L,-8725724278030337L,-1125762467889153L, -1L,-9007199254740992L, + -9007199254740992L, -497L,127L,-1L,-72057594037927936L,-4194304L,-9007199254740992L,-4503599593816065L, + -4194304L,-8936830510563329L,-9007199254740992L, -1L, -70334384439312L,-4063233L, -6755399441973249L}; for(long data: input) { appendInt(batch, data); } diff --git a/java/tools/src/test/org/apache/orc/tools/TestFileDump.java b/java/tools/src/test/org/apache/orc/tools/TestFileDump.java index ce916d27cc7..a8dc70a9948 100644 --- a/java/tools/src/test/org/apache/orc/tools/TestFileDump.java +++ b/java/tools/src/test/org/apache/orc/tools/TestFileDump.java @@ -285,9 +285,9 @@ public void testDump() throws Exception { writer.addRowBatch(batch); } writer.addUserMetadata("hive.acid.key.index", - StandardCharsets.UTF_8.encode("1,1,1;2,3,5;")); + StandardCharsets.UTF_8.encode("1,1,1;2,3,5;")); writer.addUserMetadata("some.user.property", - StandardCharsets.UTF_8.encode("foo#bar$baz&")); + StandardCharsets.UTF_8.encode("foo#bar$baz&")); writer.close(); assertEquals(2079000, writer.getRawDataSize()); assertEquals(21000, writer.getNumberOfRows()); @@ -334,7 +334,7 @@ public void testDataDump() throws Exception { format.parse("2014-11-25 00:00:00").getTime())), "string", "hello", - "hello", + "hello", m, Arrays.asList(100, 200), 10, "foo");