Commit edcc168
test: add unit test for array_contains function
dharanad committed Dec 11, 2024
1 parent eaa6cf6 commit edcc168
Showing 2 changed files with 10 additions and 1 deletion.
native/core/src/execution/datafusion/planner.rs (1 change: 0 additions & 1 deletion)
@@ -737,7 +737,6 @@ impl PhysicalPlanner {
                 )))
             }
             ExprStruct::ArrayContains(expr) => {
-                println!("dharan code got executed");
                 let src_array_expr =
                     self.create_expr(expr.left.as_ref().unwrap(), Arc::clone(&input_schema))?;
                 let key_expr =
spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala (10 changes: 10 additions & 0 deletions)
@@ -2390,4 +2390,14 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
       checkSparkAnswer(df.select("arrUnsupportedArgs"))
     }
   }
+
+  test("array_contains") {
+    withTempDir { dir =>
+      val path = new Path(dir.toURI.toString, "test.parquet")
+      makeParquetFileAllTypes(path, dictionaryEnabled = false, n = 10000)
+      spark.read.parquet(path.toString).createOrReplaceTempView("t1");
+      checkSparkAnswerAndOperator(
+        spark.sql("SELECT array_contains(array(_2, _3, _4), _2) FROM t1"))
+    }
+  }
 }
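
A possible follow-up, not part of this commit: a hedged sketch of a sibling test that reuses the suite's existing helpers (withTempDir, makeParquetFileAllTypes, checkSparkAnswerAndOperator) and the same all-types Parquet fixture, but takes the key from a different column so it has to be coerced to the array's common element type. The test name and column choice are illustrative only.

  // Hypothetical variant (not in this commit): key comes from column _3 rather than _2,
  // exercising coercion of the key to the array's common element type.
  test("array_contains - key from another column") {
    withTempDir { dir =>
      val path = new Path(dir.toURI.toString, "test.parquet")
      makeParquetFileAllTypes(path, dictionaryEnabled = false, n = 10000)
      spark.read.parquet(path.toString).createOrReplaceTempView("t1")
      checkSparkAnswerAndOperator(
        spark.sql("SELECT array_contains(array(_2, _3, _4), _3) FROM t1"))
    }
  }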
