Add REGEX to storage formats and missing table properties #16271

Merged · 1 commit · Feb 28, 2023
@@ -153,6 +153,8 @@
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import java.util.stream.Stream;

import static com.google.common.base.MoreObjects.firstNonNull;
@@ -223,6 +225,8 @@
import static io.trino.plugin.hive.HiveTableProperties.ORC_BLOOM_FILTER_COLUMNS;
import static io.trino.plugin.hive.HiveTableProperties.ORC_BLOOM_FILTER_FPP;
import static io.trino.plugin.hive.HiveTableProperties.PARTITIONED_BY_PROPERTY;
import static io.trino.plugin.hive.HiveTableProperties.REGEX_CASE_INSENSITIVE;
import static io.trino.plugin.hive.HiveTableProperties.REGEX_PATTERN;
import static io.trino.plugin.hive.HiveTableProperties.SKIP_FOOTER_LINE_COUNT;
import static io.trino.plugin.hive.HiveTableProperties.SKIP_HEADER_LINE_COUNT;
import static io.trino.plugin.hive.HiveTableProperties.SORTED_BY_PROPERTY;
@@ -240,7 +244,9 @@
import static io.trino.plugin.hive.HiveTableProperties.getOrcBloomFilterColumns;
import static io.trino.plugin.hive.HiveTableProperties.getOrcBloomFilterFpp;
import static io.trino.plugin.hive.HiveTableProperties.getPartitionedBy;
import static io.trino.plugin.hive.HiveTableProperties.getRegexPattern;
import static io.trino.plugin.hive.HiveTableProperties.getSingleCharacterProperty;
import static io.trino.plugin.hive.HiveTableProperties.isRegexCaseInsensitive;
import static io.trino.plugin.hive.HiveTableProperties.isTransactional;
import static io.trino.plugin.hive.HiveTimestampPrecision.NANOSECONDS;
import static io.trino.plugin.hive.HiveType.HIVE_STRING;
@@ -357,6 +363,9 @@ public class HiveMetadata
private static final String CSV_QUOTE_KEY = "quoteChar";
private static final String CSV_ESCAPE_KEY = "escapeChar";

private static final String REGEX_KEY = "input.regex";
private static final String REGEX_CASE_SENSITIVE_KEY = "input.regex.case.insensitive";

private static final String AUTO_PURGE_KEY = "auto.purge";

public static final String MODIFYING_NON_TRANSACTIONAL_TABLE_MESSAGE = "Modifying Hive table rows is only supported for transactional tables";
@@ -685,6 +694,12 @@ private ConnectorTableMetadata doGetTableMetadata(ConnectorSession session, Sche
getCsvSerdeProperty(table, CSV_ESCAPE_KEY)
.ifPresent(csvEscape -> properties.put(CSV_ESCAPE, csvEscape));

// REGEX specific properties
getSerdeProperty(table, REGEX_KEY)
.ifPresent(regex -> properties.put(REGEX_PATTERN, regex));
getSerdeProperty(table, REGEX_CASE_SENSITIVE_KEY)
.ifPresent(regexCaseInsensitive -> properties.put(REGEX_CASE_INSENSITIVE, parseBoolean(regexCaseInsensitive)));

Optional<String> comment = Optional.ofNullable(table.getParameters().get(TABLE_COMMENT));

String autoPurgeProperty = table.getParameters().get(AUTO_PURGE_KEY);
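
For orientation, the hunk above surfaces the SerDe properties stored in the metastore as Trino table properties. Below is a standalone sketch of that translation (my own illustration, not Trino code), using the key constants this PR introduces; note that parseBoolean treats anything other than a case-insensitive "true" as false:

import java.util.HashMap;
import java.util.Map;

import static java.lang.Boolean.parseBoolean;

public class SerdePropertyMappingSketch
{
    public static void main(String[] args)
    {
        // What the metastore stores for a REGEX table created by this PR
        Map<String, String> serdeProperties = Map.of(
                "input.regex", "(\\d+) (\\w+)",          // REGEX_KEY
                "input.regex.case.insensitive", "true"); // REGEX_CASE_SENSITIVE_KEY

        // What doGetTableMetadata exposes as Trino table properties
        Map<String, Object> tableProperties = new HashMap<>();
        String pattern = serdeProperties.get("input.regex");
        if (pattern != null) {
            tableProperties.put("regex", pattern); // REGEX_PATTERN
        }
        String caseInsensitive = serdeProperties.get("input.regex.case.insensitive");
        if (caseInsensitive != null) {
            // parseBoolean("true"/"TRUE") -> true; any other value -> false
            tableProperties.put("regex_case_insensitive", parseBoolean(caseInsensitive)); // REGEX_CASE_INSENSITIVE
        }
        System.out.println(tableProperties);
    }
}
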
@@ -1094,6 +1109,30 @@ else if (avroSchemaLiteral != null) {
tableProperties.put(CSV_SEPARATOR_KEY, separator.toString());
});

// REGEX specific properties
getRegexPattern(tableMetadata.getProperties())
.ifPresentOrElse(
regexPattern -> {
checkFormatForProperty(hiveStorageFormat, HiveStorageFormat.REGEX, REGEX_PATTERN);
try {
Pattern.compile(regexPattern);
}
catch (PatternSyntaxException e) {
throw new TrinoException(INVALID_TABLE_PROPERTY, "Invalid REGEX pattern value: " + regexPattern);
}
tableProperties.put(REGEX_KEY, regexPattern);
},
() -> {
if (hiveStorageFormat == HiveStorageFormat.REGEX) {
throw new TrinoException(INVALID_TABLE_PROPERTY, format("REGEX format requires the '%s' table property", REGEX_PATTERN));
}
});
isRegexCaseInsensitive(tableMetadata.getProperties())
.ifPresent(regexCaseInsensitive -> {
checkFormatForProperty(hiveStorageFormat, HiveStorageFormat.REGEX, REGEX_CASE_INSENSITIVE);
tableProperties.put(REGEX_CASE_SENSITIVE_KEY, String.valueOf(regexCaseInsensitive));
});

// Set bogus table stats to prevent Hive 2.x from gathering these stats at table creation.
// These stats are not useful by themselves and can take very long time to collect when creating an
// external table over large data set.
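
The validation above leans on Pattern.compile rejecting malformed patterns at CREATE TABLE time rather than at read time. A quick standalone illustration of that JDK behavior (plain java.util.regex, no Trino dependencies):

import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

public class RegexValidationSketch
{
    public static void main(String[] args)
    {
        // A well-formed pattern compiles without error
        Pattern.compile("(\\d+) (\\w+)");

        // An unbalanced parenthesis is rejected with PatternSyntaxException,
        // which the hunk above converts to INVALID_TABLE_PROPERTY
        try {
            Pattern.compile("(\\d+ (\\w+)");
        }
        catch (PatternSyntaxException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
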
@@ -30,6 +30,7 @@
import io.trino.plugin.hive.line.CsvPageSourceFactory;
import io.trino.plugin.hive.line.JsonFileWriterFactory;
import io.trino.plugin.hive.line.JsonPageSourceFactory;
import io.trino.plugin.hive.line.RegexFileWriterFactory;
import io.trino.plugin.hive.line.RegexPageSourceFactory;
import io.trino.plugin.hive.line.SimpleSequenceFilePageSourceFactory;
import io.trino.plugin.hive.line.SimpleSequenceFileWriterFactory;
@@ -147,6 +148,7 @@ public void configure(Binder binder)
configBinder(binder).bindConfig(OrcWriterConfig.class);
fileWriterFactoryBinder.addBinding().to(CsvFileWriterFactory.class).in(Scopes.SINGLETON);
fileWriterFactoryBinder.addBinding().to(JsonFileWriterFactory.class).in(Scopes.SINGLETON);
fileWriterFactoryBinder.addBinding().to(RegexFileWriterFactory.class).in(Scopes.SINGLETON);
fileWriterFactoryBinder.addBinding().to(SimpleTextFileWriterFactory.class).in(Scopes.SINGLETON);
fileWriterFactoryBinder.addBinding().to(SimpleSequenceFileWriterFactory.class).in(Scopes.SINGLETON);
fileWriterFactoryBinder.addBinding().to(OrcFileWriterFactory.class).in(Scopes.SINGLETON);
@@ -49,6 +49,7 @@
import static io.trino.plugin.hive.util.HiveClassNames.PARQUET_HIVE_SERDE_CLASS;
import static io.trino.plugin.hive.util.HiveClassNames.RCFILE_INPUT_FORMAT_CLASS;
import static io.trino.plugin.hive.util.HiveClassNames.RCFILE_OUTPUT_FORMAT_CLASS;
import static io.trino.plugin.hive.util.HiveClassNames.REGEX_HIVE_SERDE_CLASS;
import static io.trino.plugin.hive.util.HiveClassNames.SEQUENCEFILE_INPUT_FORMAT_CLASS;
import static io.trino.plugin.hive.util.HiveClassNames.TEXT_INPUT_FORMAT_CLASS;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
@@ -101,6 +102,11 @@ public enum HiveStorageFormat
OPENCSV_SERDE_CLASS,
TEXT_INPUT_FORMAT_CLASS,
HIVE_IGNORE_KEY_OUTPUT_FORMAT_CLASS,
DataSize.of(8, Unit.MEGABYTE)),
REGEX(
REGEX_HIVE_SERDE_CLASS,
TEXT_INPUT_FORMAT_CLASS,
HIVE_IGNORE_KEY_OUTPUT_FORMAT_CLASS,
DataSize.of(8, Unit.MEGABYTE));

private final String serde;
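
The new enum entry binds the format to Hive's RegexSerDe, which matches each input line against the table's pattern and maps capturing group i to column i. A rough standalone sketch of that per-line behavior (plain java.util.regex, not the actual SerDe implementation; the CASE_INSENSITIVE flag stands in for the regex_case_insensitive table property):

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class RegexLineParsingSketch
{
    public static void main(String[] args)
    {
        // Example pattern for "<id> <name>" lines
        Pattern pattern = Pattern.compile("(\\d+) (\\w+)", Pattern.CASE_INSENSITIVE);

        Matcher matcher = pattern.matcher("42 Trino");
        List<String> columns = new ArrayList<>();
        if (matcher.matches()) {
            // capturing group i becomes column i; Hive's RegexSerDe yields
            // all-null columns for lines that do not match
            for (int i = 1; i <= matcher.groupCount(); i++) {
                columns.add(matcher.group(i));
            }
        }
        System.out.println(columns); // [42, Trino]
    }
}
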
@@ -65,6 +65,8 @@ public class HiveTableProperties
public static final String CSV_SEPARATOR = "csv_separator";
public static final String CSV_QUOTE = "csv_quote";
public static final String CSV_ESCAPE = "csv_escape";
public static final String REGEX_PATTERN = "regex";
public static final String REGEX_CASE_INSENSITIVE = "regex_case_insensitive";
public static final String TRANSACTIONAL = "transactional";
public static final String AUTO_PURGE = "auto_purge";

@@ -153,6 +155,8 @@ public HiveTableProperties(
stringProperty(CSV_SEPARATOR, "CSV separator character", null, false),
stringProperty(CSV_QUOTE, "CSV quote character", null, false),
stringProperty(CSV_ESCAPE, "CSV escape character", null, false),
stringProperty(REGEX_PATTERN, "REGEX pattern", null, false),
booleanProperty(REGEX_CASE_INSENSITIVE, "REGEX pattern is case insensitive", null, false),
booleanProperty(TRANSACTIONAL, "Table is transactional", null, false),
booleanProperty(AUTO_PURGE, "Skip trash when table or partition is deleted", config.isAutoPurge(), false),
booleanProperty(
@@ -288,6 +292,16 @@ public static Optional<Character> getSingleCharacterProperty(Map<String, Object>
return Optional.of(stringValue.charAt(0));
}

public static Optional<String> getRegexPattern(Map<String, Object> tableProperties)
{
return Optional.ofNullable((String) tableProperties.get(REGEX_PATTERN));
}

public static Optional<Boolean> isRegexCaseInsensitive(Map<String, Object> tableProperties)
{
return Optional.ofNullable((Boolean) tableProperties.get(REGEX_CASE_INSENSITIVE));
}

public static Optional<Boolean> isTransactional(Map<String, Object> tableProperties)
{
return Optional.ofNullable((Boolean) tableProperties.get(TRANSACTIONAL));
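
A note on the getter shape: returning Optional<Boolean> (like isTransactional below it) lets the caller distinguish "property never set" from "property explicitly set to false", so HiveMetadata only writes the input.regex.case.insensitive serde property when the user actually supplied a value. A standalone sketch of that distinction (illustrative copy, not Trino code):

import java.util.Map;
import java.util.Optional;

public class OptionalBooleanPropertySketch
{
    // Mirrors isRegexCaseInsensitive above
    static Optional<Boolean> isRegexCaseInsensitive(Map<String, Object> tableProperties)
    {
        return Optional.ofNullable((Boolean) tableProperties.get("regex_case_insensitive"));
    }

    public static void main(String[] args)
    {
        // Unset: nothing is written to the serde properties
        System.out.println(isRegexCaseInsensitive(Map.of()));                                // Optional.empty
        // Explicitly false: still present, so the serde property is written as "false"
        System.out.println(isRegexCaseInsensitive(Map.of("regex_case_insensitive", false))); // Optional[false]
        System.out.println(isRegexCaseInsensitive(Map.of("regex_case_insensitive", true)));  // Optional[true]
    }
}
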
@@ -0,0 +1,55 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.hive.line;

import io.trino.plugin.hive.FileWriter;
import io.trino.plugin.hive.HiveFileWriterFactory;
import io.trino.plugin.hive.WriterKind;
import io.trino.plugin.hive.acid.AcidTransaction;
import io.trino.plugin.hive.metastore.StorageFormat;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorSession;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

import java.util.List;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Properties;

import static io.trino.plugin.hive.HiveErrorCode.HIVE_WRITER_OPEN_ERROR;
import static io.trino.plugin.hive.util.HiveClassNames.REGEX_HIVE_SERDE_CLASS;

public class RegexFileWriterFactory
implements HiveFileWriterFactory
{
@Override
public Optional<FileWriter> createFileWriter(
Path path,
List<String> inputColumnNames,
StorageFormat storageFormat,
Properties schema,
JobConf configuration,
ConnectorSession session,
OptionalInt bucketNumber,
AcidTransaction transaction,
boolean useAcidSchema,
WriterKind writerKind)
{
if (REGEX_HIVE_SERDE_CLASS.equals(storageFormat.getSerde())) {
throw new TrinoException(HIVE_WRITER_OPEN_ERROR, "REGEX format is read-only");
}
return Optional.empty();
}
}
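
A design note on the new factory: returning Optional.empty() for non-REGEX serdes declines the write so another bound factory can claim it, while throwing for the REGEX serde makes INSERT and CREATE TABLE AS fail fast with a clear message instead of a generic "no suitable writer" error. A simplified sketch of that dispatch contract as I read it (hypothetical types, not the actual Trino dispatch code):

import java.util.Optional;

public class WriterDispatchSketch
{
    // Simplified stand-in for HiveFileWriterFactory: a factory either claims the
    // write, declines (empty), or rejects the format outright by throwing.
    interface WriterFactory
    {
        Optional<String> createWriter(String serdeClass);
    }

    public static void main(String[] args)
    {
        WriterFactory regexFactory = serde -> {
            if ("org.apache.hadoop.hive.serde2.RegexSerDe".equals(serde)) {
                throw new UnsupportedOperationException("REGEX format is read-only");
            }
            return Optional.empty(); // decline; some other factory may handle it
        };
        WriterFactory textFactory = serde -> Optional.of("text writer");

        // regexFactory declines the text serde, so textFactory claims the write
        String serde = "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe";
        Optional<String> writer = regexFactory.createWriter(serde)
                .or(() -> textFactory.createWriter(serde));
        System.out.println(writer.orElseThrow()); // text writer
    }
}
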
@@ -35,6 +35,7 @@ public final class HiveClassNames
public static final String PARQUET_HIVE_SERDE_CLASS = "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe";
public static final String RCFILE_INPUT_FORMAT_CLASS = "org.apache.hadoop.hive.ql.io.RCFileInputFormat";
public static final String RCFILE_OUTPUT_FORMAT_CLASS = "org.apache.hadoop.hive.ql.io.RCFileOutputFormat";
public static final String REGEX_HIVE_SERDE_CLASS = "org.apache.hadoop.hive.serde2.RegexSerDe";
public static final String SEQUENCEFILE_INPUT_FORMAT_CLASS = "org.apache.hadoop.mapred.SequenceFileInputFormat";
public static final String SYMLINK_TEXT_INPUT_FORMAT_CLASS = "org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat";
public static final String TEXT_INPUT_FORMAT_CLASS = "org.apache.hadoop.mapred.TextInputFormat";
@@ -226,6 +226,7 @@
import static io.trino.plugin.hive.HiveStorageFormat.PARQUET;
import static io.trino.plugin.hive.HiveStorageFormat.RCBINARY;
import static io.trino.plugin.hive.HiveStorageFormat.RCTEXT;
import static io.trino.plugin.hive.HiveStorageFormat.REGEX;
import static io.trino.plugin.hive.HiveStorageFormat.SEQUENCEFILE;
import static io.trino.plugin.hive.HiveStorageFormat.TEXTFILE;
import static io.trino.plugin.hive.HiveTableProperties.BUCKETED_BY_PROPERTY;
@@ -503,8 +504,8 @@ private static RowType toRowType(List<ColumnMetadata> columns)

protected Set<HiveStorageFormat> createTableFormats = difference(
ImmutableSet.copyOf(HiveStorageFormat.values()),
// exclude formats that change table schema with serde
ImmutableSet.of(AVRO, CSV));
// exclude formats that change table schema with serde and read-only formats
ImmutableSet.of(AVRO, CSV, REGEX));

private static final TypeOperators TYPE_OPERATORS = new TypeOperators();
private static final BlockTypeOperators BLOCK_TYPE_OPERATORS = new BlockTypeOperators(TYPE_OPERATORS);
@@ -496,6 +496,10 @@ public void testTableCreation()
// CSV supports only unbounded VARCHAR type
continue;
}
if (storageFormat == HiveStorageFormat.REGEX) {
// REGEX format is read-only
continue;
}
createTable(temporaryCreateTable, storageFormat);
dropTable(temporaryCreateTable);
}
@@ -120,6 +120,7 @@
import static io.trino.plugin.hive.HiveQueryRunner.HIVE_CATALOG;
import static io.trino.plugin.hive.HiveQueryRunner.TPCH_SCHEMA;
import static io.trino.plugin.hive.HiveQueryRunner.createBucketedSession;
import static io.trino.plugin.hive.HiveStorageFormat.REGEX;
import static io.trino.plugin.hive.HiveTableProperties.AUTO_PURGE;
import static io.trino.plugin.hive.HiveTableProperties.BUCKETED_BY_PROPERTY;
import static io.trino.plugin.hive.HiveTableProperties.BUCKET_COUNT_PROPERTY;
@@ -2055,6 +2056,10 @@ public void testEmptyBucketedTable()
{
// create empty bucket files for all storage formats and compression codecs
for (HiveStorageFormat storageFormat : HiveStorageFormat.values()) {
if (storageFormat == REGEX) {
// REGEX format is read-only
continue;
}
for (HiveCompressionCodec compressionCodec : HiveCompressionCodec.values()) {
if ((storageFormat == HiveStorageFormat.AVRO) && (compressionCodec == HiveCompressionCodec.LZ4)) {
continue;
@@ -8588,6 +8593,10 @@ private List<TestingHiveStorageFormat> getAllTestingHiveStorageFormat()
// CSV supports only unbounded VARCHAR type
continue;
}
if (hiveStorageFormat == REGEX) {
// REGEX format is read-only
continue;
}
if (hiveStorageFormat == HiveStorageFormat.PARQUET) {
formats.add(new TestingHiveStorageFormat(
Session.builder(session)
@@ -35,6 +35,7 @@
import io.trino.plugin.hive.line.CsvPageSourceFactory;
import io.trino.plugin.hive.line.JsonFileWriterFactory;
import io.trino.plugin.hive.line.JsonPageSourceFactory;
import io.trino.plugin.hive.line.RegexFileWriterFactory;
import io.trino.plugin.hive.line.RegexPageSourceFactory;
import io.trino.plugin.hive.line.SimpleSequenceFilePageSourceFactory;
import io.trino.plugin.hive.line.SimpleSequenceFileWriterFactory;
@@ -224,6 +225,7 @@ public static Set<HiveFileWriterFactory> getDefaultHiveFileWriterFactories(HiveC
return ImmutableSet.<HiveFileWriterFactory>builder()
.add(new CsvFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER))
.add(new JsonFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER))
.add(new RegexFileWriterFactory())
.add(new SimpleTextFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER))
.add(new SimpleSequenceFileWriterFactory(fileSystemFactory, TESTING_TYPE_MANAGER, nodeVersion))
.add(new RcFileFileWriterFactory(hdfsEnvironment, TESTING_TYPE_MANAGER, nodeVersion, hiveConfig))
@@ -116,6 +116,10 @@ public void testAllFormats()
// CSV supports only unbounded VARCHAR type, which is not provided by lineitem
continue;
}
if (format == HiveStorageFormat.REGEX) {
// REGEX format is read-only
continue;
}
config.setHiveStorageFormat(format);
config.setHiveCompressionCodec(NONE);
long uncompressedLength = writeTestFile(config, metastore, makeFileName(tempDir, config));