From fbd03d9e69cd3d8b1dbe7c6323ae1cdf2849647b Mon Sep 17 00:00:00 2001
From: praveenkrishna
Date: Wed, 21 Jun 2023 16:36:11 +0530
Subject: [PATCH 1/5] Move coercion related methods to a util class

---
 .../io/trino/plugin/hive/HivePageSource.java  | 272 +---------------
 .../plugin/hive/coercions/CharCoercer.java    |   2 +-
 .../plugin/hive/coercions/CoercionUtils.java  | 296 ++++++++++++++++++
 .../plugin/hive/coercions/VarcharCoercer.java |   2 +-
 .../hive/coercions/TestTimestampCoercer.java  |   2 +-
 5 files changed, 300 insertions(+), 274 deletions(-)
 create mode 100644 plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java

diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java
index 0300c6d9b2a8..684954326966 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java
@@ -17,44 +17,19 @@
 import io.trino.filesystem.Location;
 import io.trino.plugin.hive.HivePageSourceProvider.BucketAdaptation;
 import io.trino.plugin.hive.HivePageSourceProvider.ColumnMapping;
-import io.trino.plugin.hive.coercions.CharCoercer;
-import io.trino.plugin.hive.coercions.DoubleToFloatCoercer;
-import io.trino.plugin.hive.coercions.FloatToDoubleCoercer;
-import io.trino.plugin.hive.coercions.IntegerNumberToVarcharCoercer;
-import io.trino.plugin.hive.coercions.IntegerNumberUpscaleCoercer;
-import io.trino.plugin.hive.coercions.TimestampCoercer.LongTimestampToVarcharCoercer;
-import io.trino.plugin.hive.coercions.TimestampCoercer.ShortTimestampToVarcharCoercer;
-import io.trino.plugin.hive.coercions.VarcharCoercer;
-import io.trino.plugin.hive.coercions.VarcharToIntegerNumberCoercer;
-import io.trino.plugin.hive.type.Category;
-import io.trino.plugin.hive.type.ListTypeInfo;
-import io.trino.plugin.hive.type.MapTypeInfo;
 import io.trino.plugin.hive.type.TypeInfo;
 import io.trino.plugin.hive.util.HiveBucketing.BucketingVersion;
 import io.trino.spi.Page;
 import io.trino.spi.TrinoException;
-import io.trino.spi.block.ArrayBlock;
 import io.trino.spi.block.Block;
-import io.trino.spi.block.ColumnarArray;
-import io.trino.spi.block.ColumnarMap;
-import io.trino.spi.block.ColumnarRow;
-import io.trino.spi.block.DictionaryBlock;
 import io.trino.spi.block.LazyBlock;
 import io.trino.spi.block.LazyBlockLoader;
-import io.trino.spi.block.RowBlock;
 import io.trino.spi.block.RunLengthEncodedBlock;
 import io.trino.spi.connector.ConnectorPageSource;
 import io.trino.spi.connector.RecordCursor;
 import io.trino.spi.metrics.Metrics;
-import io.trino.spi.type.ArrayType;
-import io.trino.spi.type.CharType;
-import io.trino.spi.type.DecimalType;
-import io.trino.spi.type.MapType;
-import io.trino.spi.type.RowType;
-import io.trino.spi.type.TimestampType;
 import io.trino.spi.type.Type;
 import io.trino.spi.type.TypeManager;
-import io.trino.spi.type.VarcharType;
 import it.unimi.dsi.fastutil.ints.IntArrayList;
 
 import javax.annotation.Nullable;
@@ -76,26 +51,8 @@
 import static io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_BUCKET_FILES;
 import static io.trino.plugin.hive.HivePageSourceProvider.ColumnMappingKind.EMPTY;
 import static io.trino.plugin.hive.HivePageSourceProvider.ColumnMappingKind.PREFILLED;
-import static io.trino.plugin.hive.HiveType.HIVE_BYTE;
-import static io.trino.plugin.hive.HiveType.HIVE_DOUBLE;
-import static io.trino.plugin.hive.HiveType.HIVE_FLOAT;
-import static
io.trino.plugin.hive.HiveType.HIVE_INT; -import static io.trino.plugin.hive.HiveType.HIVE_LONG; -import static io.trino.plugin.hive.HiveType.HIVE_SHORT; -import static io.trino.plugin.hive.coercions.DecimalCoercers.createDecimalToDecimalCoercer; -import static io.trino.plugin.hive.coercions.DecimalCoercers.createDecimalToDoubleCoercer; -import static io.trino.plugin.hive.coercions.DecimalCoercers.createDecimalToRealCoercer; -import static io.trino.plugin.hive.coercions.DecimalCoercers.createDecimalToVarcharCoercer; -import static io.trino.plugin.hive.coercions.DecimalCoercers.createDoubleToDecimalCoercer; -import static io.trino.plugin.hive.coercions.DecimalCoercers.createRealToDecimalCoercer; +import static io.trino.plugin.hive.coercions.CoercionUtils.createCoercer; import static io.trino.plugin.hive.util.HiveBucketing.getHiveBucket; -import static io.trino.plugin.hive.util.HiveUtil.extractStructFieldTypes; -import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; -import static io.trino.spi.block.ColumnarArray.toColumnarArray; -import static io.trino.spi.block.ColumnarMap.toColumnarMap; -import static io.trino.spi.block.ColumnarRow.toColumnarRow; -import static io.trino.spi.type.DoubleType.DOUBLE; -import static io.trino.spi.type.RealType.REAL; import static java.lang.String.format; import static java.util.Objects.requireNonNull; @@ -299,233 +256,6 @@ public ConnectorPageSource getPageSource() return delegate; } - public static Optional> createCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) - { - if (fromHiveType.equals(toHiveType)) { - return Optional.empty(); - } - - Type fromType = fromHiveType.getType(typeManager, timestampPrecision); - Type toType = toHiveType.getType(typeManager, timestampPrecision); - - if (toType instanceof VarcharType toVarcharType && (fromHiveType.equals(HIVE_BYTE) || fromHiveType.equals(HIVE_SHORT) || fromHiveType.equals(HIVE_INT) || fromHiveType.equals(HIVE_LONG))) { - return Optional.of(new IntegerNumberToVarcharCoercer<>(fromType, toVarcharType)); - } - if (fromType instanceof VarcharType fromVarcharType && (toHiveType.equals(HIVE_BYTE) || toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) { - return Optional.of(new VarcharToIntegerNumberCoercer<>(fromVarcharType, toType)); - } - if (fromType instanceof VarcharType fromVarcharType && toType instanceof VarcharType toVarcharType) { - if (narrowerThan(toVarcharType, fromVarcharType)) { - return Optional.of(new VarcharCoercer(fromVarcharType, toVarcharType)); - } - return Optional.empty(); - } - if (fromType instanceof CharType fromCharType && toType instanceof CharType toCharType) { - if (narrowerThan(toCharType, fromCharType)) { - return Optional.of(new CharCoercer(fromCharType, toCharType)); - } - return Optional.empty(); - } - if (fromHiveType.equals(HIVE_BYTE) && (toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) { - return Optional.of(new IntegerNumberUpscaleCoercer<>(fromType, toType)); - } - if (fromHiveType.equals(HIVE_SHORT) && (toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) { - return Optional.of(new IntegerNumberUpscaleCoercer<>(fromType, toType)); - } - if (fromHiveType.equals(HIVE_INT) && toHiveType.equals(HIVE_LONG)) { - return Optional.of(new IntegerNumberUpscaleCoercer<>(fromType, toType)); - } - if (fromHiveType.equals(HIVE_FLOAT) && toHiveType.equals(HIVE_DOUBLE)) { - return Optional.of(new FloatToDoubleCoercer()); - 
} - if (fromHiveType.equals(HIVE_DOUBLE) && toHiveType.equals(HIVE_FLOAT)) { - return Optional.of(new DoubleToFloatCoercer()); - } - if (fromType instanceof DecimalType fromDecimalType && toType instanceof DecimalType toDecimalType) { - return Optional.of(createDecimalToDecimalCoercer(fromDecimalType, toDecimalType)); - } - if (fromType instanceof DecimalType fromDecimalType && toType == DOUBLE) { - return Optional.of(createDecimalToDoubleCoercer(fromDecimalType)); - } - if (fromType instanceof DecimalType fromDecimalType && toType == REAL) { - return Optional.of(createDecimalToRealCoercer(fromDecimalType)); - } - if (fromType instanceof DecimalType fromDecimalType && toType instanceof VarcharType toVarcharType) { - return Optional.of(createDecimalToVarcharCoercer(fromDecimalType, toVarcharType)); - } - if (fromType == DOUBLE && toType instanceof DecimalType toDecimalType) { - return Optional.of(createDoubleToDecimalCoercer(toDecimalType)); - } - if (fromType == REAL && toType instanceof DecimalType toDecimalType) { - return Optional.of(createRealToDecimalCoercer(toDecimalType)); - } - if (fromType instanceof TimestampType timestampType && toType instanceof VarcharType varcharType) { - if (timestampType.isShort()) { - return Optional.of(new ShortTimestampToVarcharCoercer(timestampType, varcharType)); - } - return Optional.of(new LongTimestampToVarcharCoercer(timestampType, varcharType)); - } - if ((fromType instanceof ArrayType) && (toType instanceof ArrayType)) { - return Optional.of(new ListCoercer(typeManager, fromHiveType, toHiveType, timestampPrecision)); - } - if ((fromType instanceof MapType) && (toType instanceof MapType)) { - return Optional.of(new MapCoercer(typeManager, fromHiveType, toHiveType, timestampPrecision)); - } - if ((fromType instanceof RowType) && (toType instanceof RowType)) { - HiveType fromHiveTypeStruct = (fromHiveType.getCategory() == Category.UNION) ? HiveType.toHiveType(fromType) : fromHiveType; - HiveType toHiveTypeStruct = (toHiveType.getCategory() == Category.UNION) ? 
HiveType.toHiveType(toType) : toHiveType; - - return Optional.of(new StructCoercer(typeManager, fromHiveTypeStruct, toHiveTypeStruct, timestampPrecision)); - } - - throw new TrinoException(NOT_SUPPORTED, format("Unsupported coercion from %s to %s", fromHiveType, toHiveType)); - } - - public static boolean narrowerThan(VarcharType first, VarcharType second) - { - requireNonNull(first, "first is null"); - requireNonNull(second, "second is null"); - if (first.isUnbounded() || second.isUnbounded()) { - return !first.isUnbounded(); - } - return first.getBoundedLength() < second.getBoundedLength(); - } - - public static boolean narrowerThan(CharType first, CharType second) - { - requireNonNull(first, "first is null"); - requireNonNull(second, "second is null"); - return first.getLength() < second.getLength(); - } - - private static class ListCoercer - implements Function - { - private final Optional> elementCoercer; - - public ListCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) - { - requireNonNull(typeManager, "typeManager is null"); - requireNonNull(fromHiveType, "fromHiveType is null"); - requireNonNull(toHiveType, "toHiveType is null"); - requireNonNull(timestampPrecision, "timestampPrecision is null"); - HiveType fromElementHiveType = HiveType.valueOf(((ListTypeInfo) fromHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName()); - HiveType toElementHiveType = HiveType.valueOf(((ListTypeInfo) toHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName()); - this.elementCoercer = createCoercer(typeManager, fromElementHiveType, toElementHiveType, timestampPrecision); - } - - @Override - public Block apply(Block block) - { - if (elementCoercer.isEmpty()) { - return block; - } - ColumnarArray arrayBlock = toColumnarArray(block); - Block elementsBlock = elementCoercer.get().apply(arrayBlock.getElementsBlock()); - boolean[] valueIsNull = new boolean[arrayBlock.getPositionCount()]; - int[] offsets = new int[arrayBlock.getPositionCount() + 1]; - for (int i = 0; i < arrayBlock.getPositionCount(); i++) { - valueIsNull[i] = arrayBlock.isNull(i); - offsets[i + 1] = offsets[i] + arrayBlock.getLength(i); - } - return ArrayBlock.fromElementBlock(arrayBlock.getPositionCount(), Optional.of(valueIsNull), offsets, elementsBlock); - } - } - - private static class MapCoercer - implements Function - { - private final Type toType; - private final Optional> keyCoercer; - private final Optional> valueCoercer; - - public MapCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) - { - requireNonNull(typeManager, "typeManager is null"); - requireNonNull(fromHiveType, "fromHiveType is null"); - requireNonNull(timestampPrecision, "timestampPrecision is null"); - this.toType = toHiveType.getType(typeManager); - HiveType fromKeyHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName()); - HiveType fromValueHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName()); - HiveType toKeyHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName()); - HiveType toValueHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName()); - this.keyCoercer = createCoercer(typeManager, fromKeyHiveType, toKeyHiveType, timestampPrecision); - this.valueCoercer = createCoercer(typeManager, fromValueHiveType, 
toValueHiveType, timestampPrecision); - } - - @Override - public Block apply(Block block) - { - ColumnarMap mapBlock = toColumnarMap(block); - Block keysBlock = keyCoercer.isEmpty() ? mapBlock.getKeysBlock() : keyCoercer.get().apply(mapBlock.getKeysBlock()); - Block valuesBlock = valueCoercer.isEmpty() ? mapBlock.getValuesBlock() : valueCoercer.get().apply(mapBlock.getValuesBlock()); - boolean[] valueIsNull = new boolean[mapBlock.getPositionCount()]; - int[] offsets = new int[mapBlock.getPositionCount() + 1]; - for (int i = 0; i < mapBlock.getPositionCount(); i++) { - valueIsNull[i] = mapBlock.isNull(i); - offsets[i + 1] = offsets[i] + mapBlock.getEntryCount(i); - } - return ((MapType) toType).createBlockFromKeyValue(Optional.of(valueIsNull), offsets, keysBlock, valuesBlock); - } - } - - private static class StructCoercer - implements Function - { - private final List>> coercers; - private final Block[] nullBlocks; - - public StructCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) - { - requireNonNull(typeManager, "typeManager is null"); - requireNonNull(fromHiveType, "fromHiveType is null"); - requireNonNull(toHiveType, "toHiveType is null"); - requireNonNull(timestampPrecision, "timestampPrecision is null"); - List fromFieldTypes = extractStructFieldTypes(fromHiveType); - List toFieldTypes = extractStructFieldTypes(toHiveType); - ImmutableList.Builder>> coercers = ImmutableList.builder(); - this.nullBlocks = new Block[toFieldTypes.size()]; - for (int i = 0; i < toFieldTypes.size(); i++) { - if (i >= fromFieldTypes.size()) { - nullBlocks[i] = toFieldTypes.get(i).getType(typeManager).createBlockBuilder(null, 1).appendNull().build(); - coercers.add(Optional.empty()); - } - else { - coercers.add(createCoercer(typeManager, fromFieldTypes.get(i), toFieldTypes.get(i), timestampPrecision)); - } - } - this.coercers = coercers.build(); - } - - @Override - public Block apply(Block block) - { - ColumnarRow rowBlock = toColumnarRow(block); - Block[] fields = new Block[coercers.size()]; - int[] ids = new int[rowBlock.getField(0).getPositionCount()]; - for (int i = 0; i < coercers.size(); i++) { - Optional> coercer = coercers.get(i); - if (coercer.isPresent()) { - fields[i] = coercer.get().apply(rowBlock.getField(i)); - } - else if (i < rowBlock.getFieldCount()) { - fields[i] = rowBlock.getField(i); - } - else { - fields[i] = DictionaryBlock.create(ids.length, nullBlocks[i], ids); - } - } - boolean[] valueIsNull = null; - if (rowBlock.mayHaveNull()) { - valueIsNull = new boolean[rowBlock.getPositionCount()]; - for (int i = 0; i < rowBlock.getPositionCount(); i++) { - valueIsNull[i] = rowBlock.isNull(i); - } - } - return RowBlock.fromFieldBlocks(rowBlock.getPositionCount(), Optional.ofNullable(valueIsNull), fields); - } - } - private static final class CoercionLazyBlockLoader implements LazyBlockLoader { diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CharCoercer.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CharCoercer.java index 54df1a03794a..0c9ecdf76cb8 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CharCoercer.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CharCoercer.java @@ -19,7 +19,7 @@ import io.trino.spi.type.CharType; import static com.google.common.base.Preconditions.checkArgument; -import static io.trino.plugin.hive.HivePageSource.narrowerThan; +import static 
io.trino.plugin.hive.coercions.CoercionUtils.narrowerThan; import static io.trino.spi.type.Chars.truncateToLengthAndTrimSpaces; public class CharCoercer diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java new file mode 100644 index 000000000000..6674d1140bec --- /dev/null +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java @@ -0,0 +1,296 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.hive.coercions; + +import com.google.common.collect.ImmutableList; +import io.trino.plugin.hive.HiveTimestampPrecision; +import io.trino.plugin.hive.HiveType; +import io.trino.plugin.hive.type.Category; +import io.trino.plugin.hive.type.ListTypeInfo; +import io.trino.plugin.hive.type.MapTypeInfo; +import io.trino.spi.TrinoException; +import io.trino.spi.block.ArrayBlock; +import io.trino.spi.block.Block; +import io.trino.spi.block.ColumnarArray; +import io.trino.spi.block.ColumnarMap; +import io.trino.spi.block.ColumnarRow; +import io.trino.spi.block.DictionaryBlock; +import io.trino.spi.block.RowBlock; +import io.trino.spi.type.ArrayType; +import io.trino.spi.type.CharType; +import io.trino.spi.type.DecimalType; +import io.trino.spi.type.MapType; +import io.trino.spi.type.RowType; +import io.trino.spi.type.TimestampType; +import io.trino.spi.type.Type; +import io.trino.spi.type.TypeManager; +import io.trino.spi.type.VarcharType; + +import java.util.List; +import java.util.Optional; +import java.util.function.Function; + +import static io.trino.plugin.hive.HiveType.HIVE_BYTE; +import static io.trino.plugin.hive.HiveType.HIVE_DOUBLE; +import static io.trino.plugin.hive.HiveType.HIVE_FLOAT; +import static io.trino.plugin.hive.HiveType.HIVE_INT; +import static io.trino.plugin.hive.HiveType.HIVE_LONG; +import static io.trino.plugin.hive.HiveType.HIVE_SHORT; +import static io.trino.plugin.hive.coercions.DecimalCoercers.createDecimalToDecimalCoercer; +import static io.trino.plugin.hive.coercions.DecimalCoercers.createDecimalToDoubleCoercer; +import static io.trino.plugin.hive.coercions.DecimalCoercers.createDecimalToRealCoercer; +import static io.trino.plugin.hive.coercions.DecimalCoercers.createDecimalToVarcharCoercer; +import static io.trino.plugin.hive.coercions.DecimalCoercers.createDoubleToDecimalCoercer; +import static io.trino.plugin.hive.coercions.DecimalCoercers.createRealToDecimalCoercer; +import static io.trino.plugin.hive.util.HiveUtil.extractStructFieldTypes; +import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED; +import static io.trino.spi.block.ColumnarArray.toColumnarArray; +import static io.trino.spi.block.ColumnarMap.toColumnarMap; +import static io.trino.spi.block.ColumnarRow.toColumnarRow; +import static io.trino.spi.type.DoubleType.DOUBLE; +import static io.trino.spi.type.RealType.REAL; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; + +public 
final class CoercionUtils +{ + private CoercionUtils() {} + + public static Optional> createCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) + { + if (fromHiveType.equals(toHiveType)) { + return Optional.empty(); + } + + Type fromType = fromHiveType.getType(typeManager, timestampPrecision); + Type toType = toHiveType.getType(typeManager, timestampPrecision); + + if (toType instanceof VarcharType toVarcharType && (fromHiveType.equals(HIVE_BYTE) || fromHiveType.equals(HIVE_SHORT) || fromHiveType.equals(HIVE_INT) || fromHiveType.equals(HIVE_LONG))) { + return Optional.of(new IntegerNumberToVarcharCoercer<>(fromType, toVarcharType)); + } + if (fromType instanceof VarcharType fromVarcharType && (toHiveType.equals(HIVE_BYTE) || toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) { + return Optional.of(new VarcharToIntegerNumberCoercer<>(fromVarcharType, toType)); + } + if (fromType instanceof VarcharType fromVarcharType && toType instanceof VarcharType toVarcharType) { + if (narrowerThan(toVarcharType, fromVarcharType)) { + return Optional.of(new VarcharCoercer(fromVarcharType, toVarcharType)); + } + return Optional.empty(); + } + if (fromType instanceof CharType fromCharType && toType instanceof CharType toCharType) { + if (narrowerThan(toCharType, fromCharType)) { + return Optional.of(new CharCoercer(fromCharType, toCharType)); + } + return Optional.empty(); + } + if (fromHiveType.equals(HIVE_BYTE) && (toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) { + return Optional.of(new IntegerNumberUpscaleCoercer<>(fromType, toType)); + } + if (fromHiveType.equals(HIVE_SHORT) && (toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) { + return Optional.of(new IntegerNumberUpscaleCoercer<>(fromType, toType)); + } + if (fromHiveType.equals(HIVE_INT) && toHiveType.equals(HIVE_LONG)) { + return Optional.of(new IntegerNumberUpscaleCoercer<>(fromType, toType)); + } + if (fromHiveType.equals(HIVE_FLOAT) && toHiveType.equals(HIVE_DOUBLE)) { + return Optional.of(new FloatToDoubleCoercer()); + } + if (fromHiveType.equals(HIVE_DOUBLE) && toHiveType.equals(HIVE_FLOAT)) { + return Optional.of(new DoubleToFloatCoercer()); + } + if (fromType instanceof DecimalType fromDecimalType && toType instanceof DecimalType toDecimalType) { + return Optional.of(createDecimalToDecimalCoercer(fromDecimalType, toDecimalType)); + } + if (fromType instanceof DecimalType fromDecimalType && toType == DOUBLE) { + return Optional.of(createDecimalToDoubleCoercer(fromDecimalType)); + } + if (fromType instanceof DecimalType fromDecimalType && toType == REAL) { + return Optional.of(createDecimalToRealCoercer(fromDecimalType)); + } + if (fromType instanceof DecimalType fromDecimalType && toType instanceof VarcharType toVarcharType) { + return Optional.of(createDecimalToVarcharCoercer(fromDecimalType, toVarcharType)); + } + if (fromType == DOUBLE && toType instanceof DecimalType toDecimalType) { + return Optional.of(createDoubleToDecimalCoercer(toDecimalType)); + } + if (fromType == REAL && toType instanceof DecimalType toDecimalType) { + return Optional.of(createRealToDecimalCoercer(toDecimalType)); + } + if (fromType instanceof TimestampType timestampType && toType instanceof VarcharType varcharType) { + if (timestampType.isShort()) { + return Optional.of(new TimestampCoercer.ShortTimestampToVarcharCoercer(timestampType, varcharType)); + } + return Optional.of(new 
TimestampCoercer.LongTimestampToVarcharCoercer(timestampType, varcharType)); + } + if ((fromType instanceof ArrayType) && (toType instanceof ArrayType)) { + return Optional.of(new ListCoercer(typeManager, fromHiveType, toHiveType, timestampPrecision)); + } + if ((fromType instanceof MapType) && (toType instanceof MapType)) { + return Optional.of(new MapCoercer(typeManager, fromHiveType, toHiveType, timestampPrecision)); + } + if ((fromType instanceof RowType) && (toType instanceof RowType)) { + HiveType fromHiveTypeStruct = (fromHiveType.getCategory() == Category.UNION) ? HiveType.toHiveType(fromType) : fromHiveType; + HiveType toHiveTypeStruct = (toHiveType.getCategory() == Category.UNION) ? HiveType.toHiveType(toType) : toHiveType; + + return Optional.of(new StructCoercer(typeManager, fromHiveTypeStruct, toHiveTypeStruct, timestampPrecision)); + } + + throw new TrinoException(NOT_SUPPORTED, format("Unsupported coercion from %s to %s", fromHiveType, toHiveType)); + } + + public static boolean narrowerThan(VarcharType first, VarcharType second) + { + requireNonNull(first, "first is null"); + requireNonNull(second, "second is null"); + if (first.isUnbounded() || second.isUnbounded()) { + return !first.isUnbounded(); + } + return first.getBoundedLength() < second.getBoundedLength(); + } + + public static boolean narrowerThan(CharType first, CharType second) + { + requireNonNull(first, "first is null"); + requireNonNull(second, "second is null"); + return first.getLength() < second.getLength(); + } + + private static class ListCoercer + implements Function + { + private final Optional> elementCoercer; + + public ListCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) + { + requireNonNull(typeManager, "typeManager is null"); + requireNonNull(fromHiveType, "fromHiveType is null"); + requireNonNull(toHiveType, "toHiveType is null"); + requireNonNull(timestampPrecision, "timestampPrecision is null"); + HiveType fromElementHiveType = HiveType.valueOf(((ListTypeInfo) fromHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName()); + HiveType toElementHiveType = HiveType.valueOf(((ListTypeInfo) toHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName()); + this.elementCoercer = createCoercer(typeManager, fromElementHiveType, toElementHiveType, timestampPrecision); + } + + @Override + public Block apply(Block block) + { + if (elementCoercer.isEmpty()) { + return block; + } + ColumnarArray arrayBlock = toColumnarArray(block); + Block elementsBlock = elementCoercer.get().apply(arrayBlock.getElementsBlock()); + boolean[] valueIsNull = new boolean[arrayBlock.getPositionCount()]; + int[] offsets = new int[arrayBlock.getPositionCount() + 1]; + for (int i = 0; i < arrayBlock.getPositionCount(); i++) { + valueIsNull[i] = arrayBlock.isNull(i); + offsets[i + 1] = offsets[i] + arrayBlock.getLength(i); + } + return ArrayBlock.fromElementBlock(arrayBlock.getPositionCount(), Optional.of(valueIsNull), offsets, elementsBlock); + } + } + + private static class MapCoercer + implements Function + { + private final Type toType; + private final Optional> keyCoercer; + private final Optional> valueCoercer; + + public MapCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) + { + requireNonNull(typeManager, "typeManager is null"); + requireNonNull(fromHiveType, "fromHiveType is null"); + requireNonNull(timestampPrecision, "timestampPrecision is null"); + this.toType = 
toHiveType.getType(typeManager); + HiveType fromKeyHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName()); + HiveType fromValueHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName()); + HiveType toKeyHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName()); + HiveType toValueHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName()); + this.keyCoercer = createCoercer(typeManager, fromKeyHiveType, toKeyHiveType, timestampPrecision); + this.valueCoercer = createCoercer(typeManager, fromValueHiveType, toValueHiveType, timestampPrecision); + } + + @Override + public Block apply(Block block) + { + ColumnarMap mapBlock = toColumnarMap(block); + Block keysBlock = keyCoercer.isEmpty() ? mapBlock.getKeysBlock() : keyCoercer.get().apply(mapBlock.getKeysBlock()); + Block valuesBlock = valueCoercer.isEmpty() ? mapBlock.getValuesBlock() : valueCoercer.get().apply(mapBlock.getValuesBlock()); + boolean[] valueIsNull = new boolean[mapBlock.getPositionCount()]; + int[] offsets = new int[mapBlock.getPositionCount() + 1]; + for (int i = 0; i < mapBlock.getPositionCount(); i++) { + valueIsNull[i] = mapBlock.isNull(i); + offsets[i + 1] = offsets[i] + mapBlock.getEntryCount(i); + } + return ((MapType) toType).createBlockFromKeyValue(Optional.of(valueIsNull), offsets, keysBlock, valuesBlock); + } + } + + private static class StructCoercer + implements Function + { + private final List>> coercers; + private final Block[] nullBlocks; + + public StructCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) + { + requireNonNull(typeManager, "typeManager is null"); + requireNonNull(fromHiveType, "fromHiveType is null"); + requireNonNull(toHiveType, "toHiveType is null"); + requireNonNull(timestampPrecision, "timestampPrecision is null"); + List fromFieldTypes = extractStructFieldTypes(fromHiveType); + List toFieldTypes = extractStructFieldTypes(toHiveType); + ImmutableList.Builder>> coercers = ImmutableList.builder(); + this.nullBlocks = new Block[toFieldTypes.size()]; + for (int i = 0; i < toFieldTypes.size(); i++) { + if (i >= fromFieldTypes.size()) { + nullBlocks[i] = toFieldTypes.get(i).getType(typeManager).createBlockBuilder(null, 1).appendNull().build(); + coercers.add(Optional.empty()); + } + else { + coercers.add(createCoercer(typeManager, fromFieldTypes.get(i), toFieldTypes.get(i), timestampPrecision)); + } + } + this.coercers = coercers.build(); + } + + @Override + public Block apply(Block block) + { + ColumnarRow rowBlock = toColumnarRow(block); + Block[] fields = new Block[coercers.size()]; + int[] ids = new int[rowBlock.getField(0).getPositionCount()]; + for (int i = 0; i < coercers.size(); i++) { + Optional> coercer = coercers.get(i); + if (coercer.isPresent()) { + fields[i] = coercer.get().apply(rowBlock.getField(i)); + } + else if (i < rowBlock.getFieldCount()) { + fields[i] = rowBlock.getField(i); + } + else { + fields[i] = DictionaryBlock.create(ids.length, nullBlocks[i], ids); + } + } + boolean[] valueIsNull = null; + if (rowBlock.mayHaveNull()) { + valueIsNull = new boolean[rowBlock.getPositionCount()]; + for (int i = 0; i < rowBlock.getPositionCount(); i++) { + valueIsNull[i] = rowBlock.isNull(i); + } + } + return RowBlock.fromFieldBlocks(rowBlock.getPositionCount(), Optional.ofNullable(valueIsNull), fields); + } + } +} diff --git 
a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/VarcharCoercer.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/VarcharCoercer.java index 9ce92ce7c1fe..c434989c6637 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/VarcharCoercer.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/VarcharCoercer.java @@ -19,7 +19,7 @@ import io.trino.spi.type.VarcharType; import static com.google.common.base.Preconditions.checkArgument; -import static io.trino.plugin.hive.HivePageSource.narrowerThan; +import static io.trino.plugin.hive.coercions.CoercionUtils.narrowerThan; import static io.trino.spi.type.Varchars.truncateToLength; public class VarcharCoercer diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/coercions/TestTimestampCoercer.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/coercions/TestTimestampCoercer.java index 91bf91fff433..bed42fa22ce1 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/coercions/TestTimestampCoercer.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/coercions/TestTimestampCoercer.java @@ -26,10 +26,10 @@ import java.time.LocalDateTime; -import static io.trino.plugin.hive.HivePageSource.createCoercer; import static io.trino.plugin.hive.HiveTimestampPrecision.MICROSECONDS; import static io.trino.plugin.hive.HiveTimestampPrecision.NANOSECONDS; import static io.trino.plugin.hive.HiveType.toHiveType; +import static io.trino.plugin.hive.coercions.CoercionUtils.createCoercer; import static io.trino.spi.predicate.Utils.blockToNativeValue; import static io.trino.spi.predicate.Utils.nativeValueToBlock; import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS; From 760917fcfca328aa6898bb4dc963ea7c720da3d6 Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Wed, 21 Jun 2023 16:27:09 +0530 Subject: [PATCH 2/5] Use TypeCoercer instead of Function --- .../io/trino/plugin/hive/HivePageSource.java | 7 +-- .../plugin/hive/coercions/CoercionUtils.java | 47 +++++++++++++------ .../hive/coercions/DecimalCoercers.java | 14 +++--- 3 files changed, 43 insertions(+), 25 deletions(-) diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java index 684954326966..e835093c7984 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSource.java @@ -17,6 +17,7 @@ import io.trino.filesystem.Location; import io.trino.plugin.hive.HivePageSourceProvider.BucketAdaptation; import io.trino.plugin.hive.HivePageSourceProvider.ColumnMapping; +import io.trino.plugin.hive.coercions.TypeCoercer; import io.trino.plugin.hive.type.TypeInfo; import io.trino.plugin.hive.util.HiveBucketing.BucketingVersion; import io.trino.spi.Page; @@ -68,7 +69,7 @@ public class HivePageSource private final Optional bucketValidator; private final Object[] prefilledValues; private final Type[] types; - private final List>> coercers; + private final List>> coercers; private final Optional projectionsAdapter; private final ConnectorPageSource delegate; @@ -97,7 +98,7 @@ public HivePageSource( prefilledValues = new Object[size]; types = new Type[size]; - ImmutableList.Builder>> coercers = ImmutableList.builder(); + ImmutableList.Builder>> coercers = ImmutableList.builder(); for (int columnIndex = 0; columnIndex < size; columnIndex++) { ColumnMapping columnMapping = columnMappings.get(columnIndex); @@ 
-189,7 +190,7 @@ public Page getNextPage() case REGULAR: case SYNTHESIZED: Block block = dataPage.getBlock(columnMapping.getIndex()); - Optional> coercer = coercers.get(fieldId); + Optional> coercer = coercers.get(fieldId); if (coercer.isPresent()) { block = new LazyBlock(batchSize, new CoercionLazyBlockLoader(block, coercer.get())); } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java index 6674d1140bec..dd160c4a21f8 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java @@ -22,6 +22,7 @@ import io.trino.spi.TrinoException; import io.trino.spi.block.ArrayBlock; import io.trino.spi.block.Block; +import io.trino.spi.block.BlockBuilder; import io.trino.spi.block.ColumnarArray; import io.trino.spi.block.ColumnarMap; import io.trino.spi.block.ColumnarRow; @@ -39,7 +40,6 @@ import java.util.List; import java.util.Optional; -import java.util.function.Function; import static io.trino.plugin.hive.HiveType.HIVE_BYTE; import static io.trino.plugin.hive.HiveType.HIVE_DOUBLE; @@ -67,7 +67,7 @@ public final class CoercionUtils { private CoercionUtils() {} - public static Optional> createCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) + public static Optional> createCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) { if (fromHiveType.equals(toHiveType)) { return Optional.empty(); @@ -167,12 +167,13 @@ public static boolean narrowerThan(CharType first, CharType second) } private static class ListCoercer - implements Function + extends TypeCoercer { - private final Optional> elementCoercer; + private final Optional> elementCoercer; public ListCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) { + super((ArrayType) fromHiveType.getType(typeManager, timestampPrecision), (ArrayType) toHiveType.getType(typeManager, timestampPrecision)); requireNonNull(typeManager, "typeManager is null"); requireNonNull(fromHiveType, "fromHiveType is null"); requireNonNull(toHiveType, "toHiveType is null"); @@ -198,21 +199,26 @@ public Block apply(Block block) } return ArrayBlock.fromElementBlock(arrayBlock.getPositionCount(), Optional.of(valueIsNull), offsets, elementsBlock); } + + @Override + protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) + { + throw new UnsupportedOperationException("Not supported"); + } } private static class MapCoercer - implements Function + extends TypeCoercer { - private final Type toType; - private final Optional> keyCoercer; - private final Optional> valueCoercer; + private final Optional> keyCoercer; + private final Optional> valueCoercer; public MapCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) { + super((MapType) fromHiveType.getType(typeManager, timestampPrecision), (MapType) toHiveType.getType(typeManager, timestampPrecision)); requireNonNull(typeManager, "typeManager is null"); requireNonNull(fromHiveType, "fromHiveType is null"); requireNonNull(timestampPrecision, "timestampPrecision is null"); - this.toType = toHiveType.getType(typeManager); HiveType fromKeyHiveType = HiveType.valueOf(((MapTypeInfo) 
fromHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName()); HiveType fromValueHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName()); HiveType toKeyHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName()); @@ -233,25 +239,32 @@ public Block apply(Block block) valueIsNull[i] = mapBlock.isNull(i); offsets[i + 1] = offsets[i] + mapBlock.getEntryCount(i); } - return ((MapType) toType).createBlockFromKeyValue(Optional.of(valueIsNull), offsets, keysBlock, valuesBlock); + return toType.createBlockFromKeyValue(Optional.of(valueIsNull), offsets, keysBlock, valuesBlock); + } + + @Override + protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) + { + throw new UnsupportedOperationException("Not supported"); } } private static class StructCoercer - implements Function + extends TypeCoercer { - private final List>> coercers; + private final List>> coercers; private final Block[] nullBlocks; public StructCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) { + super((RowType) fromHiveType.getType(typeManager, timestampPrecision), (RowType) toHiveType.getType(typeManager, timestampPrecision)); requireNonNull(typeManager, "typeManager is null"); requireNonNull(fromHiveType, "fromHiveType is null"); requireNonNull(toHiveType, "toHiveType is null"); requireNonNull(timestampPrecision, "timestampPrecision is null"); List fromFieldTypes = extractStructFieldTypes(fromHiveType); List toFieldTypes = extractStructFieldTypes(toHiveType); - ImmutableList.Builder>> coercers = ImmutableList.builder(); + ImmutableList.Builder>> coercers = ImmutableList.builder(); this.nullBlocks = new Block[toFieldTypes.size()]; for (int i = 0; i < toFieldTypes.size(); i++) { if (i >= fromFieldTypes.size()) { @@ -272,7 +285,7 @@ public Block apply(Block block) Block[] fields = new Block[coercers.size()]; int[] ids = new int[rowBlock.getField(0).getPositionCount()]; for (int i = 0; i < coercers.size(); i++) { - Optional> coercer = coercers.get(i); + Optional> coercer = coercers.get(i); if (coercer.isPresent()) { fields[i] = coercer.get().apply(rowBlock.getField(i)); } @@ -292,5 +305,11 @@ else if (i < rowBlock.getFieldCount()) { } return RowBlock.fromFieldBlocks(rowBlock.getPositionCount(), Optional.ofNullable(valueIsNull), fields); } + + @Override + protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) + { + throw new UnsupportedOperationException("Not supported"); + } } } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/DecimalCoercers.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/DecimalCoercers.java index bc60aee1850b..688d1327d414 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/DecimalCoercers.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/DecimalCoercers.java @@ -24,8 +24,6 @@ import io.trino.spi.type.RealType; import io.trino.spi.type.VarcharType; -import java.util.function.Function; - import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS; import static io.trino.spi.type.DecimalConversions.doubleToLongDecimal; import static io.trino.spi.type.DecimalConversions.doubleToShortDecimal; @@ -49,7 +47,7 @@ public final class DecimalCoercers { private DecimalCoercers() {} - public static Function createDecimalToDecimalCoercer(DecimalType fromType, DecimalType toType) + public static 
TypeCoercer createDecimalToDecimalCoercer(DecimalType fromType, DecimalType toType) { if (fromType.isShort()) { if (toType.isShort()) { @@ -148,7 +146,7 @@ protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int pos } } - public static Function createDecimalToDoubleCoercer(DecimalType fromType) + public static TypeCoercer createDecimalToDoubleCoercer(DecimalType fromType) { if (fromType.isShort()) { return new ShortDecimalToDoubleCoercer(fromType); @@ -191,7 +189,7 @@ protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int pos } } - public static Function createDecimalToRealCoercer(DecimalType fromType) + public static TypeCoercer createDecimalToRealCoercer(DecimalType fromType) { if (fromType.isShort()) { return new ShortDecimalToRealCoercer(fromType); @@ -234,7 +232,7 @@ protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int pos } } - public static Function createDecimalToVarcharCoercer(DecimalType fromType, VarcharType toType) + public static TypeCoercer createDecimalToVarcharCoercer(DecimalType fromType, VarcharType toType) { if (fromType.isShort()) { return new ShortDecimalToVarcharCoercer(fromType, toType); @@ -288,7 +286,7 @@ protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int pos } } - public static Function createDoubleToDecimalCoercer(DecimalType toType) + public static TypeCoercer createDoubleToDecimalCoercer(DecimalType toType) { if (toType.isShort()) { return new DoubleToShortDecimalCoercer(toType); @@ -328,7 +326,7 @@ protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int pos } } - public static Function createRealToDecimalCoercer(DecimalType toType) + public static TypeCoercer createRealToDecimalCoercer(DecimalType toType) { if (toType.isShort()) { return new RealToShortDecimalCoercer(toType); From df2f2ebb0ad9f6cc41801b414d8c5dad0f8f1e18 Mon Sep 17 00:00:00 2001 From: praveenkrishna Date: Wed, 21 Jun 2023 17:03:14 +0530 Subject: [PATCH 3/5] For structural type derive from/to types from underlying coercer --- .../plugin/hive/coercions/CoercionUtils.java | 163 +++++++++++++----- 1 file changed, 116 insertions(+), 47 deletions(-) diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java index dd160c4a21f8..a77641c1d780 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java @@ -19,6 +19,7 @@ import io.trino.plugin.hive.type.Category; import io.trino.plugin.hive.type.ListTypeInfo; import io.trino.plugin.hive.type.MapTypeInfo; +import io.trino.plugin.hive.type.StructTypeInfo; import io.trino.spi.TrinoException; import io.trino.spi.block.ArrayBlock; import io.trino.spi.block.Block; @@ -33,6 +34,7 @@ import io.trino.spi.type.DecimalType; import io.trino.spi.type.MapType; import io.trino.spi.type.RowType; +import io.trino.spi.type.RowType.Field; import io.trino.spi.type.TimestampType; import io.trino.spi.type.Type; import io.trino.spi.type.TypeManager; @@ -53,7 +55,6 @@ import static io.trino.plugin.hive.coercions.DecimalCoercers.createDecimalToVarcharCoercer; import static io.trino.plugin.hive.coercions.DecimalCoercers.createDoubleToDecimalCoercer; import static io.trino.plugin.hive.coercions.DecimalCoercers.createRealToDecimalCoercer; -import static io.trino.plugin.hive.util.HiveUtil.extractStructFieldTypes; import static 
io.trino.spi.StandardErrorCode.NOT_SUPPORTED; import static io.trino.spi.block.ColumnarArray.toColumnarArray; import static io.trino.spi.block.ColumnarMap.toColumnarMap; @@ -134,16 +135,28 @@ private CoercionUtils() {} return Optional.of(new TimestampCoercer.LongTimestampToVarcharCoercer(timestampType, varcharType)); } if ((fromType instanceof ArrayType) && (toType instanceof ArrayType)) { - return Optional.of(new ListCoercer(typeManager, fromHiveType, toHiveType, timestampPrecision)); + return createCoercerForList( + typeManager, + (ListTypeInfo) fromHiveType.getTypeInfo(), + (ListTypeInfo) toHiveType.getTypeInfo(), + timestampPrecision); } if ((fromType instanceof MapType) && (toType instanceof MapType)) { - return Optional.of(new MapCoercer(typeManager, fromHiveType, toHiveType, timestampPrecision)); + return createCoercerForMap( + typeManager, + (MapTypeInfo) fromHiveType.getTypeInfo(), + (MapTypeInfo) toHiveType.getTypeInfo(), + timestampPrecision); } if ((fromType instanceof RowType) && (toType instanceof RowType)) { HiveType fromHiveTypeStruct = (fromHiveType.getCategory() == Category.UNION) ? HiveType.toHiveType(fromType) : fromHiveType; HiveType toHiveTypeStruct = (toHiveType.getCategory() == Category.UNION) ? HiveType.toHiveType(toType) : toHiveType; - return Optional.of(new StructCoercer(typeManager, fromHiveTypeStruct, toHiveTypeStruct, timestampPrecision)); + return createCoercerForStruct( + typeManager, + (StructTypeInfo) fromHiveTypeStruct.getTypeInfo(), + (StructTypeInfo) toHiveTypeStruct.getTypeInfo(), + timestampPrecision); } throw new TrinoException(NOT_SUPPORTED, format("Unsupported coercion from %s to %s", fromHiveType, toHiveType)); @@ -166,31 +179,100 @@ public static boolean narrowerThan(CharType first, CharType second) return first.getLength() < second.getLength(); } + private static Optional> createCoercerForList( + TypeManager typeManager, + ListTypeInfo fromListTypeInfo, + ListTypeInfo toListTypeInfo, + HiveTimestampPrecision timestampPrecision) + { + HiveType fromElementHiveType = HiveType.valueOf(fromListTypeInfo.getListElementTypeInfo().getTypeName()); + HiveType toElementHiveType = HiveType.valueOf(toListTypeInfo.getListElementTypeInfo().getTypeName()); + + return createCoercer(typeManager, fromElementHiveType, toElementHiveType, timestampPrecision) + .map(elementCoercer -> new ListCoercer(new ArrayType(elementCoercer.getFromType()), new ArrayType(elementCoercer.getToType()), elementCoercer)); + } + + private static Optional> createCoercerForMap( + TypeManager typeManager, + MapTypeInfo fromMapTypeInfo, + MapTypeInfo toMapTypeInfo, + HiveTimestampPrecision timestampPrecision) + { + HiveType fromKeyHiveType = HiveType.valueOf(fromMapTypeInfo.getMapKeyTypeInfo().getTypeName()); + HiveType fromValueHiveType = HiveType.valueOf(fromMapTypeInfo.getMapValueTypeInfo().getTypeName()); + HiveType toKeyHiveType = HiveType.valueOf(toMapTypeInfo.getMapKeyTypeInfo().getTypeName()); + HiveType toValueHiveType = HiveType.valueOf(toMapTypeInfo.getMapValueTypeInfo().getTypeName()); + Optional> keyCoercer = createCoercer(typeManager, fromKeyHiveType, toKeyHiveType, timestampPrecision); + Optional> valueCoercer = createCoercer(typeManager, fromValueHiveType, toValueHiveType, timestampPrecision); + MapType fromType = new MapType( + keyCoercer.map(TypeCoercer::getFromType).orElseGet(() -> fromKeyHiveType.getType(typeManager, timestampPrecision)), + valueCoercer.map(TypeCoercer::getFromType).orElseGet(() -> fromValueHiveType.getType(typeManager, timestampPrecision)), + 
typeManager.getTypeOperators()); + + MapType toType = new MapType( + keyCoercer.map(TypeCoercer::getToType).orElseGet(() -> toKeyHiveType.getType(typeManager, timestampPrecision)), + valueCoercer.map(TypeCoercer::getToType).orElseGet(() -> toValueHiveType.getType(typeManager, timestampPrecision)), + typeManager.getTypeOperators()); + + return Optional.of(new MapCoercer(fromType, toType, keyCoercer, valueCoercer)); + } + + private static Optional> createCoercerForStruct( + TypeManager typeManager, + StructTypeInfo fromStructTypeInfo, + StructTypeInfo toStructTypeInfo, + HiveTimestampPrecision timestampPrecision) + { + ImmutableList.Builder>> coercers = ImmutableList.builder(); + ImmutableList.Builder fromField = ImmutableList.builder(); + ImmutableList.Builder toField = ImmutableList.builder(); + + List fromStructFieldName = fromStructTypeInfo.getAllStructFieldNames(); + List toStructFieldNames = toStructTypeInfo.getAllStructFieldNames(); + + for (int i = 0; i < toStructFieldNames.size(); i++) { + HiveType toStructFieldType = HiveType.valueOf(toStructTypeInfo.getAllStructFieldTypeInfos().get(i).getTypeName()); + if (i >= fromStructFieldName.size()) { + toField.add(new Field( + Optional.of(toStructFieldNames.get(i)), + toStructFieldType.getType(typeManager, timestampPrecision))); + coercers.add(Optional.empty()); + } + else { + HiveType fromStructFieldType = HiveType.valueOf(fromStructTypeInfo.getAllStructFieldTypeInfos().get(i).getTypeName()); + + Optional> coercer = createCoercer(typeManager, fromStructFieldType, toStructFieldType, timestampPrecision); + + fromField.add(new Field( + Optional.of(fromStructFieldName.get(i)), + coercer.map(TypeCoercer::getFromType).orElseGet(() -> fromStructFieldType.getType(typeManager, timestampPrecision)))); + toField.add(new Field( + Optional.of(toStructFieldNames.get(i)), + coercer.map(TypeCoercer::getToType).orElseGet(() -> toStructFieldType.getType(typeManager, timestampPrecision)))); + + coercers.add(coercer); + } + } + + return Optional.of(new StructCoercer(RowType.from(fromField.build()), RowType.from(toField.build()), coercers.build())); + } + private static class ListCoercer extends TypeCoercer { - private final Optional> elementCoercer; + private final TypeCoercer elementCoercer; - public ListCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) + public ListCoercer(ArrayType fromType, ArrayType toType, TypeCoercer elementCoercer) { - super((ArrayType) fromHiveType.getType(typeManager, timestampPrecision), (ArrayType) toHiveType.getType(typeManager, timestampPrecision)); - requireNonNull(typeManager, "typeManager is null"); - requireNonNull(fromHiveType, "fromHiveType is null"); - requireNonNull(toHiveType, "toHiveType is null"); - requireNonNull(timestampPrecision, "timestampPrecision is null"); - HiveType fromElementHiveType = HiveType.valueOf(((ListTypeInfo) fromHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName()); - HiveType toElementHiveType = HiveType.valueOf(((ListTypeInfo) toHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName()); - this.elementCoercer = createCoercer(typeManager, fromElementHiveType, toElementHiveType, timestampPrecision); + super(fromType, toType); + this.elementCoercer = requireNonNull(elementCoercer, "elementCoercer is null"); } @Override public Block apply(Block block) { - if (elementCoercer.isEmpty()) { - return block; - } ColumnarArray arrayBlock = toColumnarArray(block); - Block elementsBlock = 
elementCoercer.get().apply(arrayBlock.getElementsBlock()); + Block elementsBlock = elementCoercer.apply(arrayBlock.getElementsBlock()); boolean[] valueIsNull = new boolean[arrayBlock.getPositionCount()]; int[] offsets = new int[arrayBlock.getPositionCount() + 1]; for (int i = 0; i < arrayBlock.getPositionCount(); i++) { @@ -213,18 +295,15 @@ private static class MapCoercer private final Optional> keyCoercer; private final Optional> valueCoercer; - public MapCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) + public MapCoercer( + MapType fromType, + MapType toType, + Optional> keyCoercer, + Optional> valueCoercer) { - super((MapType) fromHiveType.getType(typeManager, timestampPrecision), (MapType) toHiveType.getType(typeManager, timestampPrecision)); - requireNonNull(typeManager, "typeManager is null"); - requireNonNull(fromHiveType, "fromHiveType is null"); - requireNonNull(timestampPrecision, "timestampPrecision is null"); - HiveType fromKeyHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName()); - HiveType fromValueHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName()); - HiveType toKeyHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName()); - HiveType toValueHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName()); - this.keyCoercer = createCoercer(typeManager, fromKeyHiveType, toKeyHiveType, timestampPrecision); - this.valueCoercer = createCoercer(typeManager, fromValueHiveType, toValueHiveType, timestampPrecision); + super(fromType, toType); + this.keyCoercer = requireNonNull(keyCoercer, "keyCoercer is null"); + this.valueCoercer = requireNonNull(valueCoercer, "valueCoercer is null"); } @Override @@ -255,27 +334,17 @@ private static class StructCoercer private final List>> coercers; private final Block[] nullBlocks; - public StructCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision) + public StructCoercer(RowType fromType, RowType toType, List>> coercers) { - super((RowType) fromHiveType.getType(typeManager, timestampPrecision), (RowType) toHiveType.getType(typeManager, timestampPrecision)); - requireNonNull(typeManager, "typeManager is null"); - requireNonNull(fromHiveType, "fromHiveType is null"); - requireNonNull(toHiveType, "toHiveType is null"); - requireNonNull(timestampPrecision, "timestampPrecision is null"); - List fromFieldTypes = extractStructFieldTypes(fromHiveType); - List toFieldTypes = extractStructFieldTypes(toHiveType); - ImmutableList.Builder>> coercers = ImmutableList.builder(); - this.nullBlocks = new Block[toFieldTypes.size()]; - for (int i = 0; i < toFieldTypes.size(); i++) { - if (i >= fromFieldTypes.size()) { - nullBlocks[i] = toFieldTypes.get(i).getType(typeManager).createBlockBuilder(null, 1).appendNull().build(); - coercers.add(Optional.empty()); - } - else { - coercers.add(createCoercer(typeManager, fromFieldTypes.get(i), toFieldTypes.get(i), timestampPrecision)); + super(fromType, toType); + this.coercers = ImmutableList.copyOf(requireNonNull(coercers, "coercers is null")); + List toTypeFields = toType.getFields(); + this.nullBlocks = new Block[toTypeFields.size()]; + for (int i = 0; i < toTypeFields.size(); i++) { + if (i >= fromType.getFields().size()) { + nullBlocks[i] = 
toTypeFields.get(i).getType().createBlockBuilder(null, 1).appendNull().build();
                 }
             }
-            this.coercers = coercers.build();
         }
 
         @Override

From ee473c0cff403400bbd072d152b18461ac03de5c Mon Sep 17 00:00:00 2001
From: praveenkrishna
Date: Thu, 22 Jun 2023 11:55:53 +0530
Subject: [PATCH 4/5] Derive read column type from the TypeCoercer

---
 .../java/io/trino/plugin/hive/HivePageSourceProvider.java | 5 +++--
 .../java/io/trino/plugin/hive/coercions/CoercionUtils.java | 7 +++++++
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSourceProvider.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSourceProvider.java
index 14c80878e579..4cc269ce57b8 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSourceProvider.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSourceProvider.java
@@ -71,6 +71,7 @@
 import static io.trino.plugin.hive.HivePageSourceProvider.ColumnMapping.toColumnHandles;
 import static io.trino.plugin.hive.HivePageSourceProvider.ColumnMappingKind.PREFILLED;
 import static io.trino.plugin.hive.HiveSessionProperties.getTimestampPrecision;
+import static io.trino.plugin.hive.coercions.CoercionUtils.createTypeFromCoercer;
 import static io.trino.plugin.hive.util.HiveBucketing.HiveBucketFilter;
 import static io.trino.plugin.hive.util.HiveBucketing.getHiveBucketFilter;
 import static io.trino.plugin.hive.util.HiveUtil.getPrefilledColumnValue;
@@ -564,14 +565,14 @@ public static List toColumnHandles(List regular
                         projectedColumn.getDereferenceIndices(),
                         projectedColumn.getDereferenceNames(),
                         fromHiveType,
-                        fromHiveType.getType(typeManager, timestampPrecision));
+                        createTypeFromCoercer(typeManager, fromHiveType, columnHandle.getHiveType(), timestampPrecision));
             });
 
             return new HiveColumnHandle(
                     columnHandle.getBaseColumnName(),
                     columnHandle.getBaseHiveColumnIndex(),
                     fromHiveTypeBase,
-                    fromHiveTypeBase.getType(typeManager, timestampPrecision),
+                    createTypeFromCoercer(typeManager, fromHiveTypeBase, columnHandle.getBaseHiveType(), timestampPrecision),
                     newColumnProjectionInfo,
                     columnHandle.getColumnType(),
                     columnHandle.getComment());
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java
index a77641c1d780..abbe99be9b4e 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java
@@ -68,6 +68,13 @@ public final class CoercionUtils
 {
     private CoercionUtils() {}
 
+    public static Type createTypeFromCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision)
+    {
+        return createCoercer(typeManager, fromHiveType, toHiveType, timestampPrecision)
+                .map(TypeCoercer::getFromType)
+                .orElseGet(() -> fromHiveType.getType(typeManager, timestampPrecision));
+    }
+
     public static Optional<TypeCoercer<? extends Type, ? extends Type>> createCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, HiveTimestampPrecision timestampPrecision)
     {
         if (fromHiveType.equals(toHiveType)) {
             return Optional.empty();

From e864bcec4d3c7a2c543a7cfda6a7d2474f36a4ce Mon Sep 17 00:00:00 2001
From: praveenkrishna
Date: Thu, 22 Jun 2023 11:56:09 +0530
Subject: [PATCH 5/5] Treat precision as NANOSECONDS for timestamps to be
 coerced

This applies irrespective of the precision configured or specified as a
session property.
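[Editor's note] The precision change described above is easier to follow with the arithmetic spelled out. The stand-alone JDK sketch below mirrors only the epoch-micros-to-string conversion performed by the ShortTimestampToVarcharCoercer that this patch removes (see the TimestampCoercer.java hunk further down); the class name, the example value, and the ISO_LOCAL_DATE_TIME formatter are stand-ins chosen here, while the real coercer works on Trino Block/BlockBuilder values and uses TimestampCoercer's own LOCAL_DATE_TIME formatter. After this patch, timestamps are always read and rendered through the nanosecond-precision LongTimestampToVarcharCoercer instead.

    // Illustrative sketch only; not part of this patch series.
    import java.time.LocalDateTime;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    public class TimestampToVarcharSketch
    {
        private static final int MICROSECONDS_PER_SECOND = 1_000_000;
        private static final int NANOSECONDS_PER_MICROSECOND = 1_000;

        // Mirrors the arithmetic of the removed ShortTimestampToVarcharCoercer:
        // split epoch microseconds into whole seconds and a nanosecond fraction,
        // then format the resulting LocalDateTime as text.
        static String epochMicrosToVarchar(long epochMicros)
        {
            long epochSecond = Math.floorDiv(epochMicros, MICROSECONDS_PER_SECOND);
            int nanoFraction = Math.floorMod(epochMicros, MICROSECONDS_PER_SECOND) * NANOSECONDS_PER_MICROSECOND;
            LocalDateTime dateTime = LocalDateTime.ofEpochSecond(epochSecond, nanoFraction, ZoneOffset.UTC);
            // The real coercer uses its own formatter and truncates to the target varchar length.
            return DateTimeFormatter.ISO_LOCAL_DATE_TIME.format(dateTime);
        }

        public static void main(String[] args)
        {
            // 2023-06-21 16:36:11 UTC expressed in epoch microseconds
            System.out.println(epochMicrosToVarchar(1_687_365_371_000_000L));
        }
    }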
--- .../plugin/hive/coercions/CoercionUtils.java | 8 +- .../hive/coercions/TimestampCoercer.java | 23 ----- .../plugin/hive/orc/OrcPageSourceFactory.java | 3 +- .../plugin/hive/orc/OrcTypeTranslator.java | 13 +-- .../hive/coercions/TestTimestampCoercer.java | 86 +++++++------------ .../product/hive/BaseTestHiveCoercion.java | 51 +++++++---- .../TestHiveCoercionOnPartitionedTable.java | 7 +- .../TestHiveCoercionOnUnpartitionedTable.java | 10 ++- 8 files changed, 84 insertions(+), 117 deletions(-) diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java index abbe99be9b4e..d6e7ef6ee5ae 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/CoercionUtils.java @@ -61,6 +61,7 @@ import static io.trino.spi.block.ColumnarRow.toColumnarRow; import static io.trino.spi.type.DoubleType.DOUBLE; import static io.trino.spi.type.RealType.REAL; +import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS; import static java.lang.String.format; import static java.util.Objects.requireNonNull; @@ -135,11 +136,8 @@ public static Type createTypeFromCoercer(TypeManager typeManager, HiveType fromH if (fromType == REAL && toType instanceof DecimalType toDecimalType) { return Optional.of(createRealToDecimalCoercer(toDecimalType)); } - if (fromType instanceof TimestampType timestampType && toType instanceof VarcharType varcharType) { - if (timestampType.isShort()) { - return Optional.of(new TimestampCoercer.ShortTimestampToVarcharCoercer(timestampType, varcharType)); - } - return Optional.of(new TimestampCoercer.LongTimestampToVarcharCoercer(timestampType, varcharType)); + if (fromType instanceof TimestampType && toType instanceof VarcharType varcharType) { + return Optional.of(new TimestampCoercer.LongTimestampToVarcharCoercer(TIMESTAMP_NANOS, varcharType)); } if ((fromType instanceof ArrayType) && (toType instanceof ArrayType)) { return createCoercerForList( diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/TimestampCoercer.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/TimestampCoercer.java index 2e15793365e5..b44f5295f100 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/TimestampCoercer.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/coercions/TimestampCoercer.java @@ -48,29 +48,6 @@ public final class TimestampCoercer private TimestampCoercer() {} - public static class ShortTimestampToVarcharCoercer - extends TypeCoercer - { - public ShortTimestampToVarcharCoercer(TimestampType fromType, VarcharType toType) - { - super(fromType, toType); - } - - @Override - protected void applyCoercedValue(BlockBuilder blockBuilder, Block block, int position) - { - long epochMicros = fromType.getLong(block, position); - long epochSecond = floorDiv(epochMicros, MICROSECONDS_PER_SECOND); - int nanoFraction = floorMod(epochMicros, MICROSECONDS_PER_SECOND) * NANOSECONDS_PER_MICROSECOND; - toType.writeSlice( - blockBuilder, - truncateToLength( - Slices.utf8Slice( - LOCAL_DATE_TIME.format(LocalDateTime.ofEpochSecond(epochSecond, nanoFraction, UTC))), - toType)); - } - } - public static class LongTimestampToVarcharCoercer extends TypeCoercer { diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java 
b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java index b6007ec28c6b..d85b0d4d3a40 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java @@ -90,7 +90,6 @@ import static io.trino.plugin.hive.HiveSessionProperties.getOrcMaxReadBlockSize; import static io.trino.plugin.hive.HiveSessionProperties.getOrcStreamBufferSize; import static io.trino.plugin.hive.HiveSessionProperties.getOrcTinyStripeThreshold; -import static io.trino.plugin.hive.HiveSessionProperties.getTimestampPrecision; import static io.trino.plugin.hive.HiveSessionProperties.isOrcBloomFiltersEnabled; import static io.trino.plugin.hive.HiveSessionProperties.isOrcNestedLazy; import static io.trino.plugin.hive.HiveSessionProperties.isUseOrcColumnNames; @@ -366,7 +365,7 @@ else if (column.getBaseHiveColumnIndex() < fileColumns.size()) { Type readType = column.getType(); if (orcColumn != null) { int sourceIndex = fileReadColumns.size(); - Optional> coercer = createCoercer(orcColumn.getColumnType(), readType, getTimestampPrecision(session)); + Optional> coercer = createCoercer(orcColumn.getColumnType(), readType); if (coercer.isPresent()) { fileReadTypes.add(coercer.get().getFromType()); columnAdaptations.add(ColumnAdaptation.coercedColumn(sourceIndex, coercer.get())); diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcTypeTranslator.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcTypeTranslator.java index edfe07f869d8..77028d6f4643 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcTypeTranslator.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcTypeTranslator.java @@ -14,31 +14,24 @@ package io.trino.plugin.hive.orc; import io.trino.orc.metadata.OrcType.OrcTypeKind; -import io.trino.plugin.hive.HiveTimestampPrecision; import io.trino.plugin.hive.coercions.TimestampCoercer.LongTimestampToVarcharCoercer; -import io.trino.plugin.hive.coercions.TimestampCoercer.ShortTimestampToVarcharCoercer; import io.trino.plugin.hive.coercions.TypeCoercer; -import io.trino.spi.type.TimestampType; import io.trino.spi.type.Type; import io.trino.spi.type.VarcharType; import java.util.Optional; import static io.trino.orc.metadata.OrcType.OrcTypeKind.TIMESTAMP; -import static io.trino.spi.type.TimestampType.createTimestampType; +import static io.trino.spi.type.TimestampType.TIMESTAMP_NANOS; public final class OrcTypeTranslator { private OrcTypeTranslator() {} - public static Optional> createCoercer(OrcTypeKind fromOrcType, Type toTrinoType, HiveTimestampPrecision timestampPrecision) + public static Optional> createCoercer(OrcTypeKind fromOrcType, Type toTrinoType) { if (fromOrcType == TIMESTAMP && toTrinoType instanceof VarcharType varcharType) { - TimestampType timestampType = createTimestampType(timestampPrecision.getPrecision()); - if (timestampType.isShort()) { - return Optional.of(new ShortTimestampToVarcharCoercer(timestampType, varcharType)); - } - return Optional.of(new LongTimestampToVarcharCoercer(timestampType, varcharType)); + return Optional.of(new LongTimestampToVarcharCoercer(TIMESTAMP_NANOS, varcharType)); } return Optional.empty(); } diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/coercions/TestTimestampCoercer.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/coercions/TestTimestampCoercer.java index bed42fa22ce1..deabbe331ee6 100644 --- 
a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/coercions/TestTimestampCoercer.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/coercions/TestTimestampCoercer.java @@ -26,13 +26,11 @@ import java.time.LocalDateTime; -import static io.trino.plugin.hive.HiveTimestampPrecision.MICROSECONDS; import static io.trino.plugin.hive.HiveTimestampPrecision.NANOSECONDS; import static io.trino.plugin.hive.HiveType.toHiveType; import static io.trino.plugin.hive.coercions.CoercionUtils.createCoercer; import static io.trino.spi.predicate.Utils.blockToNativeValue; import static io.trino.spi.predicate.Utils.nativeValueToBlock; -import static io.trino.spi.type.TimestampType.TIMESTAMP_MICROS; import static io.trino.spi.type.TimestampType.TIMESTAMP_PICOS; import static io.trino.spi.type.VarcharType.createUnboundedVarcharType; import static io.trino.spi.type.VarcharType.createVarcharType; @@ -44,15 +42,7 @@ public class TestTimestampCoercer { @Test(dataProvider = "timestampValuesProvider") - public void testShortTimestampToVarchar(String timestampValue, String hiveTimestampValue) - { - LocalDateTime localDateTime = LocalDateTime.parse(timestampValue); - SqlTimestamp timestamp = SqlTimestamp.fromSeconds(TIMESTAMP_MICROS.getPrecision(), localDateTime.toEpochSecond(UTC), localDateTime.get(NANO_OF_SECOND)); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createUnboundedVarcharType(), hiveTimestampValue); - } - - @Test(dataProvider = "timestampValuesProvider") - public void testLongTimestampToVarchar(String timestampValue, String hiveTimestampValue) + public void testTimestampToVarchar(String timestampValue, String hiveTimestampValue) { LocalDateTime localDateTime = LocalDateTime.parse(timestampValue); SqlTimestamp timestamp = SqlTimestamp.fromSeconds(TIMESTAMP_PICOS.getPrecision(), localDateTime.toEpochSecond(UTC), localDateTime.get(NANO_OF_SECOND)); @@ -60,47 +50,40 @@ public void testLongTimestampToVarchar(String timestampValue, String hiveTimesta } @Test - public void testShortTimestampToSmallerVarchar() - { - LocalDateTime localDateTime = LocalDateTime.parse("2023-04-11T05:16:12.345678"); - SqlTimestamp timestamp = SqlTimestamp.fromSeconds(TIMESTAMP_MICROS.getPrecision(), localDateTime.toEpochSecond(UTC), localDateTime.get(NANO_OF_SECOND)); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(1), "2"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(2), "20"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(3), "202"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(4), "2023"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(5), "2023-"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(6), "2023-0"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(7), "2023-04"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(8), "2023-04-"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(9), "2023-04-1"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(10), "2023-04-11"); - 
assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(11), "2023-04-11 "); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(12), "2023-04-11 0"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(13), "2023-04-11 05"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(14), "2023-04-11 05:"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(15), "2023-04-11 05:1"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(16), "2023-04-11 05:16"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(17), "2023-04-11 05:16:"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(18), "2023-04-11 05:16:1"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(19), "2023-04-11 05:16:12"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(20), "2023-04-11 05:16:12."); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(21), "2023-04-11 05:16:12.3"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(22), "2023-04-11 05:16:12.34"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(23), "2023-04-11 05:16:12.345"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(24), "2023-04-11 05:16:12.3456"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(25), "2023-04-11 05:16:12.34567"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(26), "2023-04-11 05:16:12.345678"); - assertShortTimestampToVarcharCoercions(TIMESTAMP_MICROS, timestamp.getEpochMicros(), createVarcharType(27), "2023-04-11 05:16:12.345678"); - } - - @Test - public void testLongTimestampToSmallerVarchar() + public void testTimestampToSmallerVarchar() { LocalDateTime localDateTime = LocalDateTime.parse("2023-04-11T05:16:12.345678876"); SqlTimestamp timestamp = SqlTimestamp.fromSeconds(TIMESTAMP_PICOS.getPrecision(), localDateTime.toEpochSecond(UTC), localDateTime.get(NANO_OF_SECOND)); - assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, new LongTimestamp(timestamp.getEpochMicros(), timestamp.getPicosOfMicros()), createVarcharType(27), "2023-04-11 05:16:12.3456788"); - assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, new LongTimestamp(timestamp.getEpochMicros(), timestamp.getPicosOfMicros()), createVarcharType(28), "2023-04-11 05:16:12.34567887"); - assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, new LongTimestamp(timestamp.getEpochMicros(), timestamp.getPicosOfMicros()), createVarcharType(29), "2023-04-11 05:16:12.345678876"); + LongTimestamp longTimestamp = new LongTimestamp(timestamp.getEpochMicros(), timestamp.getPicosOfMicros()); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(1), "2"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(2), "20"); + 
assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(3), "202"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(4), "2023"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(5), "2023-"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(6), "2023-0"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(7), "2023-04"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(8), "2023-04-"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(9), "2023-04-1"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(10), "2023-04-11"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(11), "2023-04-11 "); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(12), "2023-04-11 0"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(13), "2023-04-11 05"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(14), "2023-04-11 05:"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(15), "2023-04-11 05:1"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(16), "2023-04-11 05:16"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(17), "2023-04-11 05:16:"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(18), "2023-04-11 05:16:1"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(19), "2023-04-11 05:16:12"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(20), "2023-04-11 05:16:12."); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(21), "2023-04-11 05:16:12.3"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(22), "2023-04-11 05:16:12.34"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(23), "2023-04-11 05:16:12.345"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(24), "2023-04-11 05:16:12.3456"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(25), "2023-04-11 05:16:12.34567"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(26), "2023-04-11 05:16:12.345678"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(27), "2023-04-11 05:16:12.3456788"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(28), "2023-04-11 05:16:12.34567887"); + assertLongTimestampToVarcharCoercions(TIMESTAMP_PICOS, longTimestamp, createVarcharType(29), "2023-04-11 05:16:12.345678876"); } @DataProvider @@ -129,11 +112,6 @@ public Object[][] timestampValuesProvider() }; } - public static void assertShortTimestampToVarcharCoercions(TimestampType fromType, Long valueToBeCoerced, VarcharType toType, String expectedValue) - { - assertCoercions(fromType, valueToBeCoerced, toType, Slices.utf8Slice(expectedValue), MICROSECONDS); - } - public static void assertLongTimestampToVarcharCoercions(TimestampType fromType, 
LongTimestamp valueToBeCoerced, VarcharType toType, String expectedValue) { assertCoercions(fromType, valueToBeCoerced, toType, Slices.utf8Slice(expectedValue), NANOSECONDS); diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/BaseTestHiveCoercion.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/BaseTestHiveCoercion.java index e0cf2f4e710d..a47442365ecd 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/BaseTestHiveCoercion.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/BaseTestHiveCoercion.java @@ -17,6 +17,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; +import com.google.common.collect.Streams; import io.trino.jdbc.TrinoArray; import io.trino.plugin.hive.HiveTimestampPrecision; import io.trino.tempto.assertions.QueryAssert.Row; @@ -382,9 +383,10 @@ protected void doTestHiveCoercionWithDifferentTimestampPrecision(HiveTableDefini """ INSERT INTO %s SELECT - (CAST(ROW (timestamp_value, -1) AS ROW(keep TIMESTAMP(9), si2i SMALLINT))), - ARRAY [CAST(ROW (timestamp_value, -1) AS ROW (keep TIMESTAMP(9), si2i SMALLINT))], - MAP (ARRAY [2], ARRAY [CAST(ROW (timestamp_value, -1) AS ROW (keep TIMESTAMP(9), si2i SMALLINT))]), + (CAST(ROW (timestamp_value, -1, timestamp_value) AS ROW(keep TIMESTAMP(9), si2i SMALLINT, timestamp2string TIMESTAMP(9)))), + ARRAY [CAST(ROW (timestamp_value, -1, timestamp_value) AS ROW (keep TIMESTAMP(9), si2i SMALLINT, timestamp2string TIMESTAMP(9)))], + MAP (ARRAY [2], ARRAY [CAST(ROW (timestamp_value, -1, timestamp_value) AS ROW (keep TIMESTAMP(9), si2i SMALLINT, timestamp2string TIMESTAMP(9)))]), + timestamp_value, 1 FROM (VALUES (TIMESTAMP '2121-07-15 15:30:12.123499'), @@ -395,22 +397,25 @@ protected void doTestHiveCoercionWithDifferentTimestampPrecision(HiveTableDefini (TIMESTAMP '2121-07-15 15:30:12.123500001')) AS t (timestamp_value) """.formatted(tableName)); - onHive().executeQuery(format("ALTER TABLE %s CHANGE COLUMN timestamp_row_to_row timestamp_row_to_row struct", tableName)); - onHive().executeQuery(format("ALTER TABLE %s CHANGE COLUMN timestamp_list_to_list timestamp_list_to_list array>", tableName)); - onHive().executeQuery(format("ALTER TABLE %s CHANGE COLUMN timestamp_map_to_map timestamp_map_to_map map>", tableName)); + onHive().executeQuery(format("ALTER TABLE %s CHANGE COLUMN timestamp_row_to_row timestamp_row_to_row struct", tableName)); + onHive().executeQuery(format("ALTER TABLE %s CHANGE COLUMN timestamp_list_to_list timestamp_list_to_list array>", tableName)); + onHive().executeQuery(format("ALTER TABLE %s CHANGE COLUMN timestamp_map_to_map timestamp_map_to_map map>", tableName)); + onHive().executeQuery(format("ALTER TABLE %s CHANGE COLUMN timestamp_to_string timestamp_to_string string", tableName)); for (HiveTimestampPrecision hiveTimestampPrecision : HiveTimestampPrecision.values()) { setHiveTimestampPrecision(hiveTimestampPrecision); assertThat(onTrino().executeQuery("SHOW COLUMNS FROM " + tableName).project(1, 2)).containsExactlyInOrder( - row("timestamp_row_to_row", "row(keep timestamp(%d), si2i integer)".formatted(hiveTimestampPrecision.getPrecision())), - row("timestamp_list_to_list", "array(row(keep timestamp(%d), si2i integer))".formatted(hiveTimestampPrecision.getPrecision())), - row("timestamp_map_to_map", "map(integer, row(keep timestamp(%d), si2i integer))".formatted(hiveTimestampPrecision.getPrecision())), + 
row("timestamp_row_to_row", "row(keep timestamp(%d), si2i integer, timestamp2string varchar)".formatted(hiveTimestampPrecision.getPrecision())), + row("timestamp_list_to_list", "array(row(keep timestamp(%d), si2i integer, timestamp2string varchar))".formatted(hiveTimestampPrecision.getPrecision())), + row("timestamp_map_to_map", "map(integer, row(keep timestamp(%d), si2i integer, timestamp2string varchar))".formatted(hiveTimestampPrecision.getPrecision())), + row("timestamp_to_string", "varchar"), row("id", "bigint")); List allColumns = ImmutableList.of( "timestamp_row_to_row", "timestamp_list_to_list", "timestamp_map_to_map", + "timestamp_to_string", "id"); // For Trino, remove unsupported columns @@ -434,14 +439,21 @@ protected void doTestHiveCoercionWithDifferentTimestampPrecision(HiveTableDefini protected Map> expectedRowsForEngineProvider(Engine engine, HiveTimestampPrecision timestampPrecision) { + List timestampAsString = ImmutableList.of( + "2121-07-15 15:30:12.123499", + "2121-07-15 15:30:12.1235", + "2121-07-15 15:30:12.123501", + "2121-07-15 15:30:12.123499999", + "2121-07-15 15:30:12.1235", + "2121-07-15 15:30:12.123500001"); if (engine == Engine.HIVE) { List baseData = ImmutableList.of( - "{\"keep\":\"2121-07-15 15:30:12.123499\",\"si2i\":-1}", - "{\"keep\":\"2121-07-15 15:30:12.1235\",\"si2i\":-1}", - "{\"keep\":\"2121-07-15 15:30:12.123501\",\"si2i\":-1}", - "{\"keep\":\"2121-07-15 15:30:12.123499999\",\"si2i\":-1}", - "{\"keep\":\"2121-07-15 15:30:12.1235\",\"si2i\":-1}", - "{\"keep\":\"2121-07-15 15:30:12.123500001\",\"si2i\":-1}"); + "{\"keep\":\"2121-07-15 15:30:12.123499\",\"si2i\":-1,\"timestamp2string\":\"2121-07-15 15:30:12.123499\"}", + "{\"keep\":\"2121-07-15 15:30:12.1235\",\"si2i\":-1,\"timestamp2string\":\"2121-07-15 15:30:12.1235\"}", + "{\"keep\":\"2121-07-15 15:30:12.123501\",\"si2i\":-1,\"timestamp2string\":\"2121-07-15 15:30:12.123501\"}", + "{\"keep\":\"2121-07-15 15:30:12.123499999\",\"si2i\":-1,\"timestamp2string\":\"2121-07-15 15:30:12.123499999\"}", + "{\"keep\":\"2121-07-15 15:30:12.1235\",\"si2i\":-1,\"timestamp2string\":\"2121-07-15 15:30:12.1235\"}", + "{\"keep\":\"2121-07-15 15:30:12.123500001\",\"si2i\":-1,\"timestamp2string\":\"2121-07-15 15:30:12.123500001\"}"); return ImmutableMap.>builder() .put("timestamp_row_to_row", baseData) .put("timestamp_list_to_list", baseData.stream() @@ -451,6 +463,7 @@ protected Map> expectedRowsForEngineProvider(Engine engine, .put("timestamp_map_to_map", baseData.stream() .map("{2:%s}"::formatted) .collect(toImmutableList())) + .put("timestamp_to_string", timestampAsString) .put("id", nCopies(6, 1)) .buildOrThrow(); } @@ -479,10 +492,13 @@ protected Map> expectedRowsForEngineProvider(Engine engine, Timestamp.valueOf("2121-07-15 15:30:12.123500001")); }; - List baseData = timestampValue.stream() - .map(timestamp -> rowBuilder() + List baseData = Streams.zip( + timestampValue.stream(), + timestampAsString.stream(), + (timestamp, timestampCoerced) -> rowBuilder() .addField("keep", timestamp) .addField("si2i", -1) + .addField("timestamp2string", timestampCoerced) .build()) .collect(toImmutableList()); @@ -494,6 +510,7 @@ protected Map> expectedRowsForEngineProvider(Engine engine, .put("timestamp_map_to_map", baseData.stream() .map(entry -> ImmutableMap.of(2, entry)) .collect(toImmutableList())) + .put("timestamp_to_string", timestampAsString) .put("id", nCopies(6, 1)) .buildOrThrow(); } diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCoercionOnPartitionedTable.java 
b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCoercionOnPartitionedTable.java index 7ad6c749cad0..ea7cae009979 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCoercionOnPartitionedTable.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCoercionOnPartitionedTable.java @@ -135,9 +135,10 @@ private static HiveTableDefinition.HiveTableDefinitionBuilder tableDefinitionFor return HiveTableDefinition.builder(tableName) .setCreateTableDDLTemplate("" + "CREATE TABLE %NAME%(" + - " timestamp_row_to_row STRUCT, " + - " timestamp_list_to_list ARRAY>, " + - " timestamp_map_to_map MAP>" + + " timestamp_row_to_row STRUCT, " + + " timestamp_list_to_list ARRAY>, " + + " timestamp_map_to_map MAP>," + + " timestamp_to_string TIMESTAMP" + ") " + "PARTITIONED BY (id BIGINT) " + rowFormat.map(s -> format("ROW FORMAT %s ", s)).orElse("") + diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCoercionOnUnpartitionedTable.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCoercionOnUnpartitionedTable.java index 68adf1a33e60..82ec2347b987 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCoercionOnUnpartitionedTable.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveCoercionOnUnpartitionedTable.java @@ -90,9 +90,10 @@ private static HiveTableDefinition.HiveTableDefinitionBuilder tableDefinitionFor return HiveTableDefinition.builder(tableName) .setCreateTableDDLTemplate(""" CREATE TABLE %NAME%( - timestamp_row_to_row STRUCT, - timestamp_list_to_list ARRAY>, - timestamp_map_to_map MAP>, + timestamp_row_to_row STRUCT, + timestamp_list_to_list ARRAY>, + timestamp_map_to_map MAP>, + timestamp_to_string TIMESTAMP, id BIGINT) STORED AS\s""" + fileFormat); } @@ -146,6 +147,9 @@ protected Map expectedExceptionsWithTrinoContext() .put(columnContext("orc", "long_decimal_to_varchar"), "Cannot read SQL type 'varchar' from ORC stream '.long_decimal_to_varchar' of type DECIMAL") .put(columnContext("orc", "short_decimal_to_bounded_varchar"), "Cannot read SQL type 'varchar(30)' from ORC stream '.short_decimal_to_bounded_varchar' of type DECIMAL") .put(columnContext("orc", "long_decimal_to_bounded_varchar"), "Cannot read SQL type 'varchar(30)' from ORC stream '.long_decimal_to_bounded_varchar' of type DECIMAL") + .put(columnContext("orc", "timestamp_row_to_row"), "Cannot read SQL type 'varchar' from ORC stream '.timestamp_row_to_row.timestamp2string' of type TIMESTAMP with attributes {}") + .put(columnContext("orc", "timestamp_list_to_list"), "Cannot read SQL type 'varchar' from ORC stream '.timestamp_row_to_row.timestamp2string' of type TIMESTAMP with attributes {}") + .put(columnContext("orc", "timestamp_map_to_map"), "Cannot read SQL type 'varchar' from ORC stream '.timestamp_row_to_row.timestamp2string' of type TIMESTAMP with attributes {}") .buildOrThrow(); } }
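
A minimal sketch, assuming the TESTING_TYPE_MANAGER and HiveType constants used by the existing coercion tests, of how createTypeFromCoercer (PATCH 4/5) interacts with the nanosecond rule introduced in PATCH 5/5:

    // Sketch only; TESTING_TYPE_MANAGER, HIVE_TIMESTAMP and HIVE_STRING are assumed constants.
    // When a coercion applies, the read type is taken from the coercer, so it is timestamp(9)
    // regardless of the requested precision:
    Type readType = createTypeFromCoercer(TESTING_TYPE_MANAGER, HIVE_TIMESTAMP, HIVE_STRING, HiveTimestampPrecision.MICROSECONDS);
    // readType is timestamp(9)

    // When no coercion is needed (from == to), it falls back to the declared type at the requested precision:
    Type uncoerced = createTypeFromCoercer(TESTING_TYPE_MANAGER, HIVE_TIMESTAMP, HIVE_TIMESTAMP, HiveTimestampPrecision.MICROSECONDS);
    // uncoerced is timestamp(6)
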