Skip to content

Commit

Permalink
Fix HiveUtil IntelliJ warnings
Browse files Browse the repository at this point in the history
  • Loading branch information
dain committed Apr 21, 2023
1 parent 3705e8e commit 33382bb
Showing 1 changed file with 22 additions and 24 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import io.airlift.compress.lzo.LzopCodec;
import io.airlift.slice.Slice;
import io.airlift.slice.SliceUtf8;
import io.airlift.slice.Slices;
import io.trino.hadoop.TextLineLengthLimitExceededException;
import io.trino.hive.formats.compression.CompressionKind;
import io.trino.orc.OrcWriterOptions;
Expand Down Expand Up @@ -173,15 +172,14 @@
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.joining;
import static org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS;
import static org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR;

public final class HiveUtil
{
public static final String SPARK_TABLE_PROVIDER_KEY = "spark.sql.sources.provider";
public static final String DELTA_LAKE_PROVIDER = "delta";
public static final String SPARK_TABLE_BUCKET_NUMBER_KEY = "spark.sql.sources.schema.numBuckets";
private static final String SPARK_TABLE_BUCKET_NUMBER_KEY = "spark.sql.sources.schema.numBuckets";

public static final String ICEBERG_TABLE_TYPE_NAME = "table_type";
public static final String ICEBERG_TABLE_TYPE_VALUE = "iceberg";
Expand Down Expand Up @@ -309,23 +307,23 @@ private static <K, V> void skipHeader(RecordReader<K, V> reader, int headerCount
}
}

public static void setReadColumns(Configuration configuration, List<Integer> readHiveColumnIndexes)
private static void setReadColumns(Configuration configuration, List<Integer> readHiveColumnIndexes)
{
configuration.set(READ_COLUMN_IDS_CONF_STR, Joiner.on(',').join(readHiveColumnIndexes));
configuration.setBoolean(READ_ALL_COLUMNS, false);
}

/**
 * Registers the Airlift LZO and LZOP codecs in the job's
 * {@code io.compression.codecs} list without displacing existing entries.
 *
 * @param jobConf job configuration whose codec list is updated in place
 */
private static void configureCompressionCodecs(JobConf jobConf)
{
    // add Airlift LZO and LZOP to head of codecs list to not override existing entries
    List<String> codecs = newArrayList(Splitter.on(",").trimResults().omitEmptyStrings().split(jobConf.get("io.compression.codecs", "")));
    if (!codecs.contains(LzoCodec.class.getName())) {
        codecs.add(0, LzoCodec.class.getName());
    }
    if (!codecs.contains(LzopCodec.class.getName())) {
        codecs.add(0, LzopCodec.class.getName());
    }
    jobConf.set("io.compression.codecs", String.join(",", codecs));
}

public static Optional<CompressionCodec> getCompressionCodec(TextInputFormat inputFormat, Path file)
Expand Down Expand Up @@ -399,7 +397,7 @@ public static String getInputFormatName(Properties schema)
return name;
}

public static long parseHiveDate(String value)
private static long parseHiveDate(String value)
{
LocalDateTime date = HIVE_DATE_PARSER.parseLocalDateTime(value);
if (!date.toLocalTime().equals(LocalTime.MIDNIGHT)) {
Expand Down Expand Up @@ -532,7 +530,7 @@ private static void validate(Deserializer deserializer)
}
}

public static boolean isHiveNull(byte[] bytes)
private static boolean isHiveNull(byte[] bytes)
{
return bytes.length == 2 && bytes[0] == '\\' && bytes[1] == 'N';
}
Expand Down Expand Up @@ -684,7 +682,7 @@ public static NullableValue parsePartitionValue(String partitionName, String val
if (isNull) {
return NullableValue.asNull(type);
}
return NullableValue.of(type, Slices.utf8Slice(value));
return NullableValue.of(type, utf8Slice(value));
}

throw new VerifyException(format("Unhandled type [%s] for partition: %s", type, partitionName));
Expand All @@ -695,7 +693,7 @@ public static boolean isStructuralType(Type type)
return (type instanceof ArrayType) || (type instanceof MapType) || (type instanceof RowType);
}

public static boolean booleanPartitionKey(String value, String name)
private static boolean booleanPartitionKey(String value, String name)
{
if (value.equalsIgnoreCase("true")) {
return true;
Expand All @@ -706,7 +704,7 @@ public static boolean booleanPartitionKey(String value, String name)
throw new TrinoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for BOOLEAN partition key: %s", value, name));
}

public static long bigintPartitionKey(String value, String name)
private static long bigintPartitionKey(String value, String name)
{
try {
return parseLong(value);
Expand All @@ -716,7 +714,7 @@ public static long bigintPartitionKey(String value, String name)
}
}

public static long integerPartitionKey(String value, String name)
private static long integerPartitionKey(String value, String name)
{
try {
return parseInt(value);
Expand All @@ -726,7 +724,7 @@ public static long integerPartitionKey(String value, String name)
}
}

public static long smallintPartitionKey(String value, String name)
private static long smallintPartitionKey(String value, String name)
{
try {
return parseShort(value);
Expand All @@ -736,7 +734,7 @@ public static long smallintPartitionKey(String value, String name)
}
}

public static long tinyintPartitionKey(String value, String name)
private static long tinyintPartitionKey(String value, String name)
{
try {
return parseByte(value);
Expand All @@ -746,7 +744,7 @@ public static long tinyintPartitionKey(String value, String name)
}
}

public static long floatPartitionKey(String value, String name)
private static long floatPartitionKey(String value, String name)
{
try {
return floatToRawIntBits(parseFloat(value));
Expand All @@ -756,7 +754,7 @@ public static long floatPartitionKey(String value, String name)
}
}

public static double doublePartitionKey(String value, String name)
private static double doublePartitionKey(String value, String name)
{
try {
return parseDouble(value);
Expand All @@ -766,7 +764,7 @@ public static double doublePartitionKey(String value, String name)
}
}

public static long datePartitionKey(String value, String name)
private static long datePartitionKey(String value, String name)
{
try {
return parseHiveDate(value);
Expand All @@ -776,7 +774,7 @@ public static long datePartitionKey(String value, String name)
}
}

public static long timestampPartitionKey(String value, String name)
private static long timestampPartitionKey(String value, String name)
{
try {
return parseHiveTimestamp(value);
Expand All @@ -786,12 +784,12 @@ public static long timestampPartitionKey(String value, String name)
}
}

public static long shortDecimalPartitionKey(String value, DecimalType type, String name)
private static long shortDecimalPartitionKey(String value, DecimalType type, String name)
{
return decimalPartitionKey(value, type, name).unscaledValue().longValue();
}

public static Int128 longDecimalPartitionKey(String value, DecimalType type, String name)
private static Int128 longDecimalPartitionKey(String value, DecimalType type, String name)
{
return Int128.valueOf(decimalPartitionKey(value, type, name).unscaledValue());
}
Expand All @@ -815,19 +813,19 @@ private static BigDecimal decimalPartitionKey(String value, DecimalType type, St
}
}

public static Slice varcharPartitionKey(String value, String name, Type columnType)
private static Slice varcharPartitionKey(String value, String name, Type columnType)
{
Slice partitionKey = Slices.utf8Slice(value);
Slice partitionKey = utf8Slice(value);
VarcharType varcharType = (VarcharType) columnType;
if (!varcharType.isUnbounded() && SliceUtf8.countCodePoints(partitionKey) > varcharType.getBoundedLength()) {
throw new TrinoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for %s partition key: %s", value, columnType, name));
}
return partitionKey;
}

public static Slice charPartitionKey(String value, String name, Type columnType)
private static Slice charPartitionKey(String value, String name, Type columnType)
{
Slice partitionKey = trimTrailingSpaces(Slices.utf8Slice(value));
Slice partitionKey = trimTrailingSpaces(utf8Slice(value));
CharType charType = (CharType) columnType;
if (SliceUtf8.countCodePoints(partitionKey) > charType.getLength()) {
throw new TrinoException(HIVE_INVALID_PARTITION_VALUE, format("Invalid partition value '%s' for %s partition key: %s", value, columnType, name));
Expand Down

0 comments on commit 33382bb

Please sign in to comment.