diff --git a/datafusion/expr/src/built_in_function.rs b/datafusion/expr/src/built_in_function.rs
index de046cde8991..5040396913e9 100644
--- a/datafusion/expr/src/built_in_function.rs
+++ b/datafusion/expr/src/built_in_function.rs
@@ -516,25 +516,19 @@ impl BuiltinScalarFunction {
                 Ok(data_type)
             }
             BuiltinScalarFunction::ArrayAppend => Ok(input_expr_types[0].clone()),
+            // Array concat allows arrays of different dimensions, e.g. array_concat(1D, 3D, 2D); find the largest dimension among them.
             BuiltinScalarFunction::ArrayConcat => {
                 let mut expr_type = Null;
                 let mut max_dims = 0;
                 for input_expr_type in input_expr_types {
-                    match input_expr_type {
-                        List(field) => {
-                            if !field.data_type().equals_datatype(&Null) {
-                                let dims = self.return_dimension(input_expr_type.clone());
-                                if max_dims < dims {
-                                    max_dims = dims;
-                                    expr_type = input_expr_type.clone();
-                                }
+                    if let List(field) = input_expr_type {
+                        if !field.data_type().equals_datatype(&Null) {
+                            let dims = self.return_dimension(input_expr_type.clone());
+                            if max_dims < dims {
+                                max_dims = dims;
+                                expr_type = input_expr_type.clone();
                             }
                         }
-                        _ => {
-                            return plan_err!(
-                                "The {self} function can only accept list as the args."
-                            )
-                        }
                     }
                 }
diff --git a/datafusion/optimizer/src/analyzer/type_coercion.rs b/datafusion/optimizer/src/analyzer/type_coercion.rs
index ba4b5d7b175c..5f036d760c9c 100644
--- a/datafusion/optimizer/src/analyzer/type_coercion.rs
+++ b/datafusion/optimizer/src/analyzer/type_coercion.rs
@@ -19,7 +19,7 @@
 use std::sync::Arc;
 
-use arrow::datatypes::{DataType, IntervalUnit};
+use arrow::datatypes::{DataType, Field, IntervalUnit};
 use datafusion_common::config::ConfigOptions;
 use datafusion_common::tree_node::{RewriteRecursion, TreeNodeRewriter};
@@ -553,6 +553,198 @@ fn coerce_arguments_for_signature(
         .collect::<Result<Vec<_>>>()
 }
 
+// TODO: Move this function to arrow-rs or common array utils module
+// The base type is the innermost non-list type.
+fn base_type(data_type: &DataType) -> Result<DataType> {
+    match data_type {
+        DataType::List(field) => match field.data_type() {
+            DataType::List(_) => base_type(field.data_type()),
+            base_type => Ok(base_type.clone()),
+        },
+
+        _ => Ok(data_type.clone()),
+    }
+}
+
+// TODO: Move this function to arrow-rs or common array utils module
+// Build a list from the given base type,
+// e.g. Int64 -> List[Int64]
+fn coerced_type_from_base_type(
+    data_type: &DataType,
+    base_type: &DataType,
+) -> Result<DataType> {
+    match data_type {
+        DataType::List(field) => match field.data_type() {
+            DataType::List(_) => Ok(DataType::List(Arc::new(Field::new(
+                field.name(),
+                coerced_type_from_base_type(field.data_type(), base_type)?,
+                field.is_nullable(),
+            )))),
+            _ => Ok(DataType::List(Arc::new(Field::new(
+                field.name(),
+                base_type.clone(),
+                field.is_nullable(),
+            )))),
+        },
+
+        _ => Ok(base_type.clone()),
+    }
+}
+
+// Replace inner nulls with the coerced type,
+// i.e. list[i64], list[null] -> list[i64], list[i64]
+fn replace_inner_nulls_with_coerced_types_(
+    coerced_types: Vec<DataType>,
+) -> Result<Vec<DataType>> {
+    let first_non_null_base_type = coerced_types
+        .iter()
+        .map(base_type)
+        .find(|t| t.is_ok() && t.as_ref().unwrap() != &DataType::Null)
+        .map(|t| t.unwrap());
+
+    if let Some(data_type) = first_non_null_base_type {
+        coerced_types
+            .iter()
+            .map(|t| {
+                if base_type(t)? == DataType::Null {
+                    coerced_type_from_base_type(t, &data_type)
+                } else {
+                    Ok(t.clone())
+                }
+            })
+            .collect::<Result<Vec<_>>>()
+    } else {
+        Ok(coerced_types)
+    }
+}
+
+// Directly replace null with the coerced type,
+// i.e. list[utf8], null -> list[utf8], list[utf8]
+fn replace_nulls_with_coerced_types(
+    coerced_types: Vec<DataType>,
+) -> Result<Vec<DataType>> {
+    let first_non_null_type = coerced_types.iter().find(|&t| {
+        let base_t = base_type(t);
+        base_t.is_ok() && base_t.as_ref().unwrap() != &DataType::Null
+    });
+
+    if let Some(data_type) = first_non_null_type {
+        coerced_types
+            .iter()
+            .map(|t| {
+                if base_type(t)? == DataType::Null {
+                    Ok(data_type.clone())
+                } else {
+                    Ok(t.clone())
+                }
+            })
+            .collect::<Result<Vec<_>>>()
+    } else {
+        Ok(coerced_types)
+    }
+}
+
+fn validate_array_function_arguments(
+    fun: &BuiltinScalarFunction,
+    input_types: &[DataType],
+) -> Result<()> {
+    match fun {
+        BuiltinScalarFunction::ArrayConcat => {
+            // Each argument must be a list.
+            for expr_type in input_types.iter() {
+                if let DataType::List(_) = expr_type {
+                    continue;
+                } else {
+                    return plan_err!(
+                        "The array_concat function can only accept list as the args"
+                    );
+                }
+            }
+            Ok(())
+        }
+        // Add more cases for other array-related functions
+        _ => Ok(()),
+    }
+}
+
+fn coerced_array_types_without_nulls(input_types: &[DataType]) -> Result<Vec<DataType>> {
+    // Get the base type for each input type,
+    // e.g. List[Int64] -> Int64
+    // List[List[Int64]] -> Int64
+    // Int64 -> Int64
+    let base_types = input_types
+        .iter()
+        .map(base_type)
+        .collect::<Result<Vec<_>>>()?;
+
+    // Get the coerced type with comparison coercion
+    let coerced_base_type = base_types
+        .iter()
+        .skip(1)
+        .fold(base_types.first().unwrap().clone(), |acc, x| {
+            comparison_coercion(&acc, x).unwrap_or(acc)
+        });
+
+    // Re-build the coerced type from the base type; ignore null since its type is difficult to determine on the first scan.
+    let coerced_types = input_types
+        .iter()
+        .map(|data_type|
+            // Special case for null (Null) or an empty array (List[Null]); the type is determined per array function.
+            if base_type(data_type)? == DataType::Null {
+                Ok(data_type.clone())
+            } else {
+                coerced_type_from_base_type(data_type, &coerced_base_type)
+            })
+        .collect::<Result<Vec<_>>>()?;
+
+    Ok(coerced_types)
+}
+
+fn coerced_array_nulls(
+    fun: &BuiltinScalarFunction,
+    coerced_types: Vec<DataType>,
+) -> Result<Vec<DataType>> {
+    // Convert Null to the coerced type
+    match fun {
+        // MakeArray(elements...): each element has the same type; convert null to the non-null type.
+        BuiltinScalarFunction::MakeArray => {
+            replace_nulls_with_coerced_types(coerced_types)
+        }
+        // ArrayAppend(list, element): null is only possible for the element; convert it to the list's inner type.
+        // ArrayPrepend(element, list): null is only possible for the element; convert it to the list's inner type.
+        // ArrayConcat: convert null to a non-null type at this step; the dimension of the list is not changed.
+        BuiltinScalarFunction::ArrayAppend
+        | BuiltinScalarFunction::ArrayPrepend
+        | BuiltinScalarFunction::ArrayConcat => {
+            replace_inner_nulls_with_coerced_types_(coerced_types)
+        }
+        _ => Ok(coerced_types),
+    }
+}
+
+// Coerce argument types for array functions; convert the types or return an error for incompatible types at this step.
+fn coerce_array_args(
+    fun: &BuiltinScalarFunction,
+    expressions: &[Expr],
+    schema: &DFSchema,
+) -> Result<Vec<Expr>> {
+    let input_types = expressions
+        .iter()
+        .map(|e| e.get_type(schema))
+        .collect::<Result<Vec<_>>>()?;
+    // TODO: We may move this check outside of type coercion.
+    // The array_concat check is done here since handling it before null coercion is easier, and it makes sense to reject invalid arguments before type coercion.
+    validate_array_function_arguments(fun, input_types.as_slice())?;
+    // Coercion is broken down into two steps, since not all array functions share the same coercion rules for nulls.
+    let coerced_types = coerced_array_types_without_nulls(input_types.as_slice())?;
+    let coerced_types = coerced_array_nulls(fun, coerced_types)?;
+    expressions
+        .iter()
+        .zip(coerced_types.iter())
+        .map(|(expr, coerced_type)| cast_expr(expr, coerced_type, schema))
+        .collect::<Result<Vec<_>>>()
+}
+
 fn coerce_arguments_for_fun(
     expressions: &[Expr],
     schema: &DFSchema,
@@ -581,27 +773,15 @@ fn coerce_arguments_for_fun(
             .collect::<Result<Vec<_>>>()?;
     }
 
-    if *fun == BuiltinScalarFunction::MakeArray {
-        // Find the final data type for the function arguments
-        let current_types = expressions
-            .iter()
-            .map(|e| e.get_type(schema))
-            .collect::<Result<Vec<_>>>()?;
-
-        let new_type = current_types
-            .iter()
-            .skip(1)
-            .fold(current_types.first().unwrap().clone(), |acc, x| {
-                comparison_coercion(&acc, x).unwrap_or(acc)
-            });
-
-        return expressions
-            .iter()
-            .zip(current_types)
-            .map(|(expr, from_type)| cast_array_expr(expr, &from_type, &new_type, schema))
-            .collect();
+    match fun {
+        BuiltinScalarFunction::MakeArray
+        | BuiltinScalarFunction::ArrayAppend
+        | BuiltinScalarFunction::ArrayPrepend
+        | BuiltinScalarFunction::ArrayConcat => {
+            coerce_array_args(fun, expressions.as_slice(), schema)
+        }
+        _ => Ok(expressions),
     }
-
-    Ok(expressions)
 }
 
 /// Cast `expr` to the specified type, if possible
@@ -609,20 +789,6 @@ fn cast_expr(expr: &Expr, to_type: &DataType, schema: &DFSchema) -> Result<Expr>
     expr.clone().cast_to(to_type, schema)
 }
 
-/// Cast array `expr` to the specified type, if possible
-fn cast_array_expr(
-    expr: &Expr,
-    from_type: &DataType,
-    to_type: &DataType,
-    schema: &DFSchema,
-) -> Result<Expr> {
-    if from_type.equals_datatype(&DataType::Null) {
-        Ok(expr.clone())
-    } else {
-        cast_expr(expr, to_type, schema)
-    }
-}
-
 /// Returns the coerced exprs for each `input_exprs`.
 /// Get the coerced data type from `aggregate_rule::coerce_types` and add `try_cast` if the
 /// data type of `input_exprs` need to be coerced.
diff --git a/datafusion/physical-expr/src/array_expressions.rs b/datafusion/physical-expr/src/array_expressions.rs
index 06432b615c5d..ef2675eb6569 100644
--- a/datafusion/physical-expr/src/array_expressions.rs
+++ b/datafusion/physical-expr/src/array_expressions.rs
@@ -327,7 +327,9 @@ fn array_array(args: &[ArrayRef], data_type: DataType) -> Result<ArrayRef> {
         } else if arg.as_any().downcast_ref::<NullArray>().is_some() {
             arrays.push(ListOrNull::Null);
         } else {
-            return internal_err!("Unsupported argument type for array");
+            return internal_err!(
+                "(array_array) Unsupported argument type for array"
+            );
         }
     }
 
@@ -674,7 +676,7 @@ pub fn array_append(args: &[ArrayRef]) -> Result<ArrayRef> {
     check_datatypes("array_append", &[arr.values(), element])?;
     let res = match arr.value_type() {
-        DataType::List(_) => concat_internal(args)?,
+        DataType::List(_) => array_concat(args)?,
         DataType::Null => {
             return Ok(array(&[ColumnarValue::Array(args[1].clone())])?.into_array(1))
         }
@@ -750,7 +752,7 @@ pub fn array_prepend(args: &[ArrayRef]) -> Result<ArrayRef> {
     check_datatypes("array_prepend", &[element, arr.values()])?;
     let res = match arr.value_type() {
-        DataType::List(_) => concat_internal(args)?,
+        DataType::List(_) => array_concat(args)?,
         DataType::Null => {
             return Ok(array(&[ColumnarValue::Array(args[0].clone())])?.into_array(1))
         }
@@ -810,7 +812,9 @@ fn align_array_dimensions(args: Vec<ArrayRef>) -> Result<Vec<ArrayRef>> {
     aligned_args
 }
 
-fn concat_internal(args: &[ArrayRef]) -> Result<ArrayRef> {
+/// Array_concat/Array_cat SQL function
+pub fn array_concat(args: &[ArrayRef]) -> Result<ArrayRef> {
+    // Dimension checks and null conversion are done in the `type_coercion` step.
     let args = align_array_dimensions(args.to_vec())?;
 
     let list_arrays =
@@ -863,22 +867,6 @@ fn concat_internal(args: &[ArrayRef]) -> Result<ArrayRef> {
     Ok(Arc::new(list))
 }
 
-/// Array_concat/Array_cat SQL function
-pub fn array_concat(args: &[ArrayRef]) -> Result<ArrayRef> {
-    let mut new_args = vec![];
-    for arg in args {
-        let (ndim, lower_data_type) =
-            compute_array_ndims_with_datatype(Some(arg.clone()))?;
-        if ndim.is_none() || ndim == Some(1) {
-            return not_impl_err!("Array is not type '{lower_data_type:?}'.");
-        } else if !lower_data_type.equals_datatype(&DataType::Null) {
-            new_args.push(arg.clone());
-        }
-    }
-
-    concat_internal(new_args.as_slice())
-}
-
 macro_rules! general_repeat {
     ($ELEMENT:expr, $COUNT:expr, $ARRAY_TYPE:ident) => {{
         let mut offsets: Vec<i32> = vec![0];
diff --git a/datafusion/sql/src/expr/value.rs b/datafusion/sql/src/expr/value.rs
index 158054ce6cce..d6b62bcce751 100644
--- a/datafusion/sql/src/expr/value.rs
+++ b/datafusion/sql/src/expr/value.rs
@@ -19,15 +19,30 @@
 use crate::planner::{ContextProvider, PlannerContext, SqlToRel};
 use arrow::compute::kernels::cast_utils::parse_interval_month_day_nano;
 use arrow_schema::DataType;
 use datafusion_common::{
-    not_impl_err, plan_err, DFSchema, DataFusionError, Result, ScalarValue,
+    internal_err, not_impl_err, plan_err, DFSchema, DataFusionError, Result, ScalarValue,
 };
 use datafusion_expr::expr::{BinaryExpr, Placeholder};
+use datafusion_expr::type_coercion::binary::comparison_coercion;
 use datafusion_expr::{lit, Expr, Operator};
 use log::debug;
 use sqlparser::ast::{BinaryOperator, Expr as SQLExpr, Interval, Value};
 use sqlparser::parser::ParserError::ParserError;
 use std::collections::HashSet;
 
+// Coerce a scalar value to another type,
+// e.g. ScalarValue::Int64(1) -> ScalarValue::Float64(1.0)
+macro_rules! coerce_scalar_value {
+    ($data_type:ty, $coerced_type:ty, $e:expr) => {{
+        let val: $data_type = $e.clone().try_into().unwrap();
+        let casted_sv = ScalarValue::try_from(val as $coerced_type);
+        if let Ok(casted_sv) = casted_sv {
+            Ok(casted_sv)
+        } else {
+            internal_err!("Failed to cast value")
+        }
+    }};
+}
+
 impl<'a, S: ContextProvider> SqlToRel<'a, S> {
     pub(crate) fn parse_value(
         &self,
@@ -150,17 +165,62 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
             }
         }
 
-        let data_types: HashSet<DataType> =
+        let data_types = values.iter().map(|e| e.get_datatype()).collect::<Vec<_>>();
+        let seen_types: HashSet<DataType> =
             values.iter().map(|e| e.get_datatype()).collect();
 
-        if data_types.is_empty() {
-            Ok(lit(ScalarValue::new_list(None, DataType::Utf8)))
-        } else if data_types.len() > 1 {
-            not_impl_err!("Arrays with different types are not supported: {data_types:?}")
-        } else {
-            let data_type = values[0].get_datatype();
+        match seen_types.len() {
+            0 => Ok(lit(ScalarValue::new_list(None, DataType::Utf8))),
+            1 => {
+                let data_type = values[0].get_datatype();
+                Ok(lit(ScalarValue::new_list(Some(values), data_type)))
+            }
+            _ => {
+                let coerced_type = data_types
+                    .iter()
+                    .skip(1)
+                    .fold(data_types[0].clone(), |acc, d| {
+                        comparison_coercion(&acc, d).unwrap_or(acc)
+                    });
+                let values = values
+                    .iter()
+                    .map(|e| {
+                        let data_type = e.get_datatype();
+                        if data_type == coerced_type {
+                            return Ok(e.clone());
+                        }
+
+                        match &e.get_datatype() {
+                            DataType::Null => {
+                                // e.g. a typed null for Int64: ScalarValue::Int64(None)
+                                ScalarValue::try_from(&coerced_type)
+                            }
+                            data_type => {
+                                // Convert to the coerced type
+                                // comparison_coercion: Float64 > Int64 > UInt64
+                                match (data_type, &coerced_type) {
+                                    (&DataType::Int64, &DataType::Float64) => {
+                                        coerce_scalar_value!(i64, f64, e)
+                                    }
+                                    (&DataType::UInt64, &DataType::Float64) => {
+                                        coerce_scalar_value!(u64, f64, e)
+                                    }
+                                    (&DataType::UInt64, &DataType::Int64) => {
+                                        coerce_scalar_value!(u64, i64, e)
+                                    }
+                                    _ => {
+                                        not_impl_err!("Unsupported array literal type coercion from {:?} to {:?}", data_type, coerced_type)
+                                    }
+                                }
-            Ok(lit(ScalarValue::new_list(Some(values), data_type)))
+                            }
+                        }
+                    })
+                    .collect::<Result<Vec<_>>>()?;
+
+                Ok(lit(ScalarValue::new_list(Some(values), coerced_type)))
+            }
         }
     }
diff --git a/datafusion/sql/tests/sql_integration.rs b/datafusion/sql/tests/sql_integration.rs
index 8b4d9686a4f5..2e048e5cc78d 100644
--- a/datafusion/sql/tests/sql_integration.rs
+++ b/datafusion/sql/tests/sql_integration.rs
@@ -24,8 +24,7 @@
 use sqlparser::dialect::{Dialect, GenericDialect, HiveDialect, MySqlDialect};
 
 use datafusion_common::plan_err;
 use datafusion_common::{
-    assert_contains, config::ConfigOptions, DataFusionError, Result, ScalarValue,
-    TableReference,
+    config::ConfigOptions, DataFusionError, Result, ScalarValue, TableReference,
 };
 use datafusion_expr::{
     logical_plan::{LogicalPlan, Prepare},
@@ -1364,18 +1363,6 @@ fn select_interval_out_of_range() {
     );
 }
 
-#[test]
-fn select_array_no_common_type() {
-    let sql = "SELECT [1, true, null]";
-    let err = logical_plan(sql).expect_err("query should have failed");
-
-    // HashSet doesn't guarantee order
-    assert_contains!(
-        err.to_string(),
-        r#"Arrays with different types are not supported: "#
-    );
-}
-
 #[test]
 fn recursive_ctes() {
     let sql = "
diff --git a/datafusion/sqllogictest/test_files/array.slt b/datafusion/sqllogictest/test_files/array.slt
index f54c2f71718c..2e2554c3be3b 100644
--- a/datafusion/sqllogictest/test_files/array.slt
+++ b/datafusion/sqllogictest/test_files/array.slt
@@ -304,10 +304,7 @@ select column1, column2, column3, column4 from nested_arrays_with_repeating_elem
 [[19, 20, 21], [19, 20, 21], [19, 20, 21], [22, 23, 24], [19, 20, 21], [25, 26, 27], [19, 20, 21], [22, 23, 24], [19, 20, 21], [19, 20, 21]] [19, 20, 21] [28, 29, 30] 5
 [[28, 29, 30], [31, 32, 33], [34, 35, 36], [28, 29, 30], [31, 32, 33], [34, 35, 36], [28, 29, 30], [31, 32, 33], [34, 35, 36], [28, 29, 30]] [28, 29, 30] [37, 38, 39] 10
 
-### Array index
-
-
 ## array[i]
 
 # single index with scalars #1 (positive index)
@@ -478,6 +475,14 @@ select column1[0:5], column2[0:3], column3[0:9] from arrays;
 
 ### Array function tests
 
+## select array
+query ?
+select [1, null];
+----
+[1, ]
+
+query error DataFusion error: This feature is not implemented: Unsupported array literal type coercion from Boolean to Int64
+select [1, true, null];
 
 ## make_array (aliases: `make_list`)
 
@@ -827,7 +832,7 @@ select array_slice(make_array(1, 2, 3, 4, 5), 0, -4), array_slice(make_array('h'
 ----
 [1] [h, e]
 
-# array_slice scalar function #13 (with negative number and NULL)
+# array_slice scalar function #13 (with positive number and NULL)
 query error
 select array_slice(make_array(1, 2, 3, 4, 5), 2, NULL), array_slice(make_array('h', 'e', 'l', 'l', 'o'), 3, NULL);
 
@@ -941,13 +946,33 @@ select make_array(['a','b'], null);
 
 ## array_append (aliases: `list_append`, `array_push_back`, `list_push_back`)
 
-# TODO: array_append with NULLs
 # array_append scalar function #1
-# query ?
-# select array_append(make_array(), 4);
-# ----
-# [4]
+query ?
+select array_append(make_array(null), 4);
+----
+[, 4]
 
+query ?
+select array_append(make_array(1, 2, null), 4);
+----
+[1, 2, , 4]
+
+query ?
+select array_append(make_array(), 4);
+----
+[4]
+
+query ?
+select array_append([[1,2,3]], [4, null, 6]);
+----
+[[1, 2, 3], [4, , 6]]
+
+query ?
+select array_append([[1,2,3]], [null]);
+----
+[[1, 2, 3], []]
+
+# TODO: array_append with pure nulls
 # array_append scalar function #2
 # query ??
 # select array_append(make_array(), make_array()), array_append(make_array(), make_array(4));
@@ -984,6 +1009,26 @@ select list_push_back(make_array(1, 2, 3), 4), list_push_back(make_array(1.0, 2.
 ----
 [1, 2, 3, 4] [1.0, 2.0, 3.0, 4.0] [h, e, l, l, o]
 
+query ?
+select array_append([1, 2, 3], null);
+----
+[1, 2, 3, ]
+
+query ?
+select array_append([1, 2, null, 4], 5);
+----
+[1, 2, , 4, 5]
+
+query ?
+select array_append([1, 2.1, 3], 4.0);
+----
+[1.0, 2.1, 3.0, 4.0]
+
+query ?
+select array_append([1, 2.1, 3], null);
+----
+[1.0, 2.1, 3.0, ]
+
 # array_append with columns #1
 query ?
 select array_append(column1, column2) from arrays_values;
@@ -1025,13 +1070,33 @@ select array_append(column1, make_array(1, 11, 111)), array_append(make_array(ma
 
 ## array_prepend (aliases: `list_prepend`, `array_push_front`, `list_push_front`)
 
-# TODO: array_prepend with NULLs
 # array_prepend scalar function #1
-# query ?
-# select array_prepend(4, make_array());
-# ----
-# [4]
+query ?
+select array_prepend(4, make_array());
+----
+[4]
 
+query ?
+select array_prepend(4, make_array(null));
+----
+[4, ]
+
+query ?
+select array_prepend(null, make_array(1, 2, 3));
+----
+[, 1, 2, 3]
+
+query ?
+select array_prepend([1, null, 3], make_array([4, 5, 6]));
+----
+[[1, , 3], [4, 5, 6]]
+
+query ?
+select array_prepend(make_array(null), make_array([4, 5, 6]));
+----
+[[], [4, 5, 6]]
+
+# TODO: array_prepend with pure nulls
 # array_prepend scalar function #2
 # query ??
 # select array_prepend(make_array(), make_array()), array_prepend(make_array(4), make_array());
@@ -1068,6 +1133,21 @@ select list_push_front(1, make_array(2, 3, 4)), list_push_front(1.0, make_array(
 ----
 [1, 2, 3, 4] [1.0, 2.0, 3.0, 4.0] [h, e, l, l, o]
 
+query ?
+select array_prepend(null, make_array(1, 2, 3));
+----
+[, 1, 2, 3]
+
+query ?
+select array_prepend(null, make_array(1, 2.2, 3));
+----
+[, 1.0, 2.2, 3.0]
+
+query ?
+select array_prepend(1.2, make_array(1, 2, 3));
+----
+[1.2, 1.0, 2.0, 3.0]
+
 # array_prepend with columns #1
 query ?
 select array_prepend(column2, column1) from arrays_values;
@@ -1176,9 +1256,17 @@ select array_repeat([1], column3), array_repeat(column1, 3) from arrays_values_w
 
 ## array_concat (aliases: `array_cat`, `list_concat`, `list_cat`)
 
 # array_concat error
-query error DataFusion error: Error during planning: The array_concat function can only accept list as the args\.
+query error DataFusion error: SQL error: ParserError\("Expected an SQL statement, found: caused"\)
+caused by
+Error during planning: The array_concat function can only accept list as the args
 select array_concat(1, 2);
 
+query error DataFusion error: type_coercion\ncaused by\nError during planning: The array_concat function can only accept list as the args
+select array_concat(make_array(make_array(1, 2), make_array(3, 4)), null);
+
+query error DataFusion error: type_coercion\ncaused by\nError during planning: The array_concat function can only accept list as the args
+select array_concat(make_array(make_array(1, 2), make_array(3, 4)), 5);
+
 # array_concat scalar function #1
 query ??
 select array_concat(make_array(1, 2, 3), make_array(4, 5, 6), make_array(7, 8, 9)), array_concat(make_array([1], [2]), make_array([3], [4]));
@@ -1219,19 +1307,19 @@ select array_concat(make_array(), make_array(2, 3));
 query ?
 select array_concat(make_array(make_array(1, 2), make_array(3, 4)), make_array(make_array()));
 ----
-[[1, 2], [3, 4]]
+[[1, 2], [3, 4], []]
 
 # array_concat scalar function #8 (with empty arrays)
 query ?
 select array_concat(make_array(make_array(1, 2), make_array(3, 4)), make_array(make_array()), make_array(make_array(), make_array()), make_array(make_array(5, 6), make_array(7, 8)));
 ----
-[[1, 2], [3, 4], [5, 6], [7, 8]]
+[[1, 2], [3, 4], [], [], [], [5, 6], [7, 8]]
 
 # array_concat scalar function #9 (with empty arrays)
 query ?
 select array_concat(make_array(make_array()), make_array(make_array(1, 2), make_array(3, 4)));
 ----
-[[1, 2], [3, 4]]
+[[], [1, 2], [3, 4]]
 
 # array_cat scalar function #10 (function alias `array_concat`)
 query ??
@@ -1287,6 +1375,26 @@ select array_concat(make_array([30, 40]), make_array(10, 20), make_array([[50, 6
 ----
 [[[[30, 40]]], [[[10, 20]]], [[[50, 60]]], [[[70, 80]]], [[[80, 40]]]]
 
+# array_concat with different dimensions and size #7 (2DL2 + 1DL1)
+query ?
+select array_concat(make_array([1,2], [3,4]), make_array(5));
+----
+[[1, 2], [3, 4], [5]]
+
+# array_concat with different dimensions and size of null #8 (2D2L + 1D1L)
+# DuckDB result: [[1, 2], [3, 4], null]
+# The behavior differs from DuckDB; since we try to convert null to a compatible type for each array function, a 1-D empty array is preferred over null.
+query ?
+select array_concat(make_array(make_array(1, 2), make_array(3, 4)), make_array(null));
+----
+[[1, 2], [3, 4], []]
+
+# array_concat with different dimensions of null #9 (2D + 1D)
+query ?
+select array_concat(make_array(make_array(1, 2), make_array(3, 4)), make_array(null, null));
+----
+[[1, 2], [3, 4], [, ]]
+
 # array_concat column-wise #1
 query ?
 select array_concat(column1, make_array(0)) from arrays_values_without_nulls;