Skip to content

Commit

Permalink
Use Spark's OrcFilters instead of the copied one; comment out a test case
Browse files Browse the repository at this point in the history
  • Loading branch information
Chong Gao committed Jan 15, 2022
1 parent de3ef18 commit a5a3168
Show file tree
Hide file tree
Showing 6 changed files with 40 additions and 371 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -56,10 +56,10 @@ import org.apache.spark.sql.connector.read.{InputPartition, PartitionReader, Par
import org.apache.spark.sql.execution.QueryExecutionException
import org.apache.spark.sql.execution.datasources.PartitionedFile
import org.apache.spark.sql.execution.datasources.orc.OrcUtils
import org.apache.spark.sql.execution.datasources.rapids.OrcFiltersWrapper
import org.apache.spark.sql.execution.datasources.v2.{EmptyPartitionReader, FilePartitionReaderFactory}
import org.apache.spark.sql.execution.datasources.v2.orc.OrcScan
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.rapids.OrcFilters
import org.apache.spark.sql.rapids.execution.TrampolineUtil
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.{ArrayType, DataType, DecimalType, MapType, StructType}
Expand Down Expand Up @@ -824,7 +824,7 @@ private case class GpuOrcFileFilterHandler(
val readerOpts = OrcInputFormat.buildOptions(
conf, orcReader, partFile.start, partFile.length)
// create the search argument if we have pushed filters
OrcFilters.createFilter(fullSchema, pushedFilters).foreach { f =>
OrcFiltersWrapper.createFilter(fullSchema, pushedFilters).foreach { f =>
readerOpts.searchArgument(f, fullSchema.fieldNames)
}
readerOpts
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
/*
* Copyright (c) 2022, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.datasources.rapids

import org.apache.hadoop.hive.ql.io.sarg.SearchArgument

import org.apache.spark.sql.execution.datasources.orc.OrcFilters
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.StructType

/**
 * Thin adapter that re-exports Spark's `OrcFilters`, which lives in a
 * package-private location, so callers outside the
 * `org.apache.spark.sql.execution.datasources.orc` package can use it.
 */
object OrcFiltersWrapper {

  /**
   * Converts the pushed-down data source [[Filter]]s into an ORC
   * [[SearchArgument]], delegating entirely to Spark's implementation.
   *
   * @param schema  full file schema the filters refer to
   * @param filters data source filters to convert
   * @return the search argument, or `None` when no filter is convertible
   */
  def createFilter(schema: StructType, filters: Seq[Filter]): Option[SearchArgument] =
    OrcFilters.createFilter(schema, filters)
}
278 changes: 0 additions & 278 deletions sql-plugin/src/main/scala/org/apache/spark/sql/rapids/OrcFilters.scala

This file was deleted.

Loading

0 comments on commit a5a3168

Please sign in to comment.