Remove duplicate isIcebergTable, isDeltaLakeTable methods #18341

Merged
========================================
DeltaLakeGlueMetastoreTableFilterProvider.java
@@ -16,10 +16,12 @@
import com.amazonaws.services.glue.model.Table;
import com.google.inject.Inject;
import com.google.inject.Provider;
-import io.trino.plugin.hive.metastore.glue.DefaultGlueMetastoreTableFilterProvider;
+import io.trino.plugin.hive.util.HiveUtil;

import java.util.function.Predicate;

+import static io.trino.plugin.hive.metastore.glue.converter.GlueToTrinoConverter.getTableParameters;
+
public class DeltaLakeGlueMetastoreTableFilterProvider
implements Provider<Predicate<Table>>
{
@@ -35,7 +37,7 @@ public DeltaLakeGlueMetastoreTableFilterProvider(DeltaLakeGlueMetastoreConfig co
public Predicate<Table> get()
{
if (hideNonDeltaLakeTables) {
-return DefaultGlueMetastoreTableFilterProvider::isDeltaLakeTable;
+return table -> HiveUtil.isDeltaLakeTable(getTableParameters(table));
}
return table -> true;
}
========================================
@@ -3758,6 +3758,15 @@ public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session,
{
requireNonNull(session, "session is null");
requireNonNull(tableName, "tableName is null");
+
+Optional<String> icebergCatalogName = getIcebergCatalogName(session);
+Optional<String> deltaLakeCatalogName = getDeltaLakeCatalogName(session);
+Optional<String> hudiCatalogName = getHudiCatalogName(session);
+
+if (icebergCatalogName.isEmpty() && deltaLakeCatalogName.isEmpty() && hudiCatalogName.isEmpty()) {
+return Optional.empty();
+}
+
if (isHiveSystemSchema(tableName.getSchemaName())) {
return Optional.empty();
}
@@ -3768,9 +3777,10 @@ public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session,
return Optional.empty();
}

-Optional<CatalogSchemaTableName> catalogSchemaTableName = redirectTableToIceberg(session, table.get())
-.or(() -> redirectTableToDeltaLake(session, table.get()))
-.or(() -> redirectTableToHudi(session, table.get()));
+Optional<CatalogSchemaTableName> catalogSchemaTableName = Optional.<CatalogSchemaTableName>empty()
+.or(() -> redirectTableToIceberg(icebergCatalogName, table.get()))
+.or(() -> redirectTableToDeltaLake(deltaLakeCatalogName, table.get()))
+.or(() -> redirectTableToHudi(hudiCatalogName, table.get()));

// stitch back the suffix we cut off.
return catalogSchemaTableName.map(name -> new CatalogSchemaTableName(
@@ -3780,9 +3790,8 @@ public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session,
name.getSchemaTableName().getTableName() + tableNameSplit.getSuffix().orElse(""))));
}

-private Optional<CatalogSchemaTableName> redirectTableToIceberg(ConnectorSession session, Table table)
+private Optional<CatalogSchemaTableName> redirectTableToIceberg(Optional<String> targetCatalogName, Table table)
{
-Optional<String> targetCatalogName = getIcebergCatalogName(session);
if (targetCatalogName.isEmpty()) {
return Optional.empty();
}
@@ -3792,9 +3801,8 @@ private Optional<CatalogSchemaTableName> redirectTableToIceberg(ConnectorSession
return Optional.empty();
}

-private Optional<CatalogSchemaTableName> redirectTableToDeltaLake(ConnectorSession session, Table table)
+private Optional<CatalogSchemaTableName> redirectTableToDeltaLake(Optional<String> targetCatalogName, Table table)
{
-Optional<String> targetCatalogName = getDeltaLakeCatalogName(session);
if (targetCatalogName.isEmpty()) {
return Optional.empty();
}
@@ -3804,9 +3812,8 @@ private Optional<CatalogSchemaTableName> redirectTableToHudi(ConnectorSessi
return Optional.empty();
}

-private Optional<CatalogSchemaTableName> redirectTableToHudi(ConnectorSession session, Table table)
+private Optional<CatalogSchemaTableName> redirectTableToHudi(Optional<String> targetCatalogName, Table table)
{
-Optional<String> targetCatalogName = getHudiCatalogName(session);
if (targetCatalogName.isEmpty()) {
return Optional.empty();
}
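
Note on the hunk above: the three catalog-name session properties are now resolved once at the top of redirectTable, and the method bails out early when none is configured, instead of each redirectTableTo* helper re-reading the session. The chain itself relies on Optional.or, which invokes each supplier lazily and stops at the first present value. A minimal, self-contained sketch of that idiom (hypothetical names, not Trino code):

import java.util.Optional;

final class OrChainSketch
{
    static Optional<String> tryIceberg() { return Optional.empty(); }
    static Optional<String> tryDeltaLake() { return Optional.of("delta_catalog"); }
    static Optional<String> tryHudi() { throw new AssertionError("never evaluated"); }

    public static void main(String[] args)
    {
        // The explicit <String> witness matters: a bare Optional.empty() at the head
        // of a call chain would infer Optional<Object> and the assignment would not compile.
        Optional<String> redirected = Optional.<String>empty()
                .or(OrChainSketch::tryIceberg)   // invoked, produces empty
                .or(OrChainSketch::tryDeltaLake) // invoked, produces a value
                .or(OrChainSketch::tryHudi);     // supplier never invoked
        System.out.println(redirected);          // prints Optional[delta_catalog]
    }
}

Running the sketch prints Optional[delta_catalog]; the tryHudi supplier is never evaluated because the previous link already produced a value. This mirrors why the PR can write the chain starting from Optional.<CatalogSchemaTableName>empty().
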
========================================
DefaultGlueMetastoreTableFilterProvider.java
@@ -18,12 +18,10 @@
import com.google.inject.Provider;
import io.trino.plugin.hive.HideDeltaLakeTables;

-import java.util.Map;
import java.util.function.Predicate;

import static io.trino.plugin.hive.metastore.glue.converter.GlueToTrinoConverter.getTableParameters;
-import static io.trino.plugin.hive.util.HiveUtil.DELTA_LAKE_PROVIDER;
-import static io.trino.plugin.hive.util.HiveUtil.SPARK_TABLE_PROVIDER_KEY;
+import static io.trino.plugin.hive.util.HiveUtil.isDeltaLakeTable;
import static java.util.function.Predicate.not;

public class DefaultGlueMetastoreTableFilterProvider
@@ -41,14 +39,8 @@ public DefaultGlueMetastoreTableFilterProvider(@HideDeltaLakeTables boolean hide
public Predicate<Table> get()
{
if (hideDeltaLakeTables) {
-return not(DefaultGlueMetastoreTableFilterProvider::isDeltaLakeTable);
+return not(table -> isDeltaLakeTable(getTableParameters(table)));
}
return table -> true;
}
-
-public static boolean isDeltaLakeTable(Table table)
-{
-Map<String, String> parameters = getTableParameters(table);
-return parameters.getOrDefault(SPARK_TABLE_PROVIDER_KEY, "").equalsIgnoreCase(DELTA_LAKE_PROVIDER);
-}
}
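
With the duplicate static method gone, both Glue filter providers build their predicate from the shared HiveUtil.isDeltaLakeTable(Map<String, String>). A rough sketch of the two filter shapes, using a stand-in record for the Glue Table model and inlining the assumed HiveUtil constant values ("spark.sql.sources.provider" for SPARK_TABLE_PROVIDER_KEY, "delta" for DELTA_LAKE_PROVIDER):

import java.util.Map;
import java.util.function.Predicate;

import static java.util.function.Predicate.not;

final class GlueFilterSketch
{
    record GlueTable(Map<String, String> parameters) {} // stand-in for com.amazonaws.services.glue.model.Table

    // Same shape as HiveUtil.isDeltaLakeTable(Map<String, String>)
    static boolean isDeltaLakeTable(Map<String, String> parameters)
    {
        return "delta".equalsIgnoreCase(parameters.get("spark.sql.sources.provider"));
    }

    public static void main(String[] args)
    {
        // DefaultGlueMetastoreTableFilterProvider: the Hive connector hides Delta tables
        Predicate<GlueTable> hiveFilter = not(table -> isDeltaLakeTable(table.parameters()));
        // DeltaLakeGlueMetastoreTableFilterProvider: the Delta connector keeps only Delta tables
        Predicate<GlueTable> deltaFilter = table -> isDeltaLakeTable(table.parameters());

        GlueTable deltaTable = new GlueTable(Map.of("spark.sql.sources.provider", "delta"));
        System.out.println(hiveFilter.test(deltaTable));  // false: hidden from Hive
        System.out.println(deltaFilter.test(deltaTable)); // true: visible to Delta Lake
    }
}
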
========================================
HiveUtil.java
@@ -1126,8 +1126,7 @@ public static boolean isDeltaLakeTable(Table table)

public static boolean isDeltaLakeTable(Map<String, String> tableParameters)
{
-return tableParameters.containsKey(SPARK_TABLE_PROVIDER_KEY)
-&& tableParameters.get(SPARK_TABLE_PROVIDER_KEY).toLowerCase(ENGLISH).equals(DELTA_LAKE_PROVIDER);
+return DELTA_LAKE_PROVIDER.equalsIgnoreCase(tableParameters.get(SPARK_TABLE_PROVIDER_KEY));
}

public static boolean isIcebergTable(Table table)
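
The rewrite above is behavior-preserving: String.equalsIgnoreCase returns false for a null argument, so flipping the comparison onto the DELTA_LAKE_PROVIDER constant subsumes both the old containsKey guard and the explicit toLowerCase(ENGLISH) comparison. A quick sketch of the three interesting cases, with the constant values inlined as above:

import java.util.Map;

final class DeltaCheckSketch
{
    static boolean isDeltaLakeTable(Map<String, String> tableParameters)
    {
        // equalsIgnoreCase(null) is false, so a missing key needs no separate containsKey guard
        return "delta".equalsIgnoreCase(tableParameters.get("spark.sql.sources.provider"));
    }

    public static void main(String[] args)
    {
        System.out.println(isDeltaLakeTable(Map.of("spark.sql.sources.provider", "DELTA"))); // true: case-insensitive
        System.out.println(isDeltaLakeTable(Map.of("spark.sql.sources.provider", "hudi")));  // false: other provider
        System.out.println(isDeltaLakeTable(Map.of()));                                      // false: key absent, get() is null
    }
}
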
========================================
@@ -207,6 +207,7 @@
import static io.trino.plugin.iceberg.IcebergMetadataColumn.FILE_PATH;
import static io.trino.plugin.iceberg.IcebergMetadataColumn.isMetadataColumnId;
import static io.trino.plugin.iceberg.IcebergSessionProperties.getExpireSnapshotMinRetention;
+import static io.trino.plugin.iceberg.IcebergSessionProperties.getHiveCatalogName;
import static io.trino.plugin.iceberg.IcebergSessionProperties.getRemoveOrphanFilesMinRetention;
import static io.trino.plugin.iceberg.IcebergSessionProperties.isCollectExtendedStatisticsOnWrite;
import static io.trino.plugin.iceberg.IcebergSessionProperties.isExtendedStatisticsEnabled;
@@ -2879,7 +2880,11 @@ public void setColumnComment(ConnectorSession session, ConnectorTableHandle tabl
@Override
public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName)
{
-return catalog.redirectTable(session, tableName);
+Optional<String> targetCatalogName = getHiveCatalogName(session);
+if (targetCatalogName.isEmpty()) {
+return Optional.empty();
+}
+return catalog.redirectTable(session, tableName, targetCatalogName.get());
}

private static CollectedStatistics processComputedTableStatistics(Table table, Collection<ComputedStatistics> computedStatistics)
========================================
IcebergUtil.java
@@ -147,8 +147,6 @@
import static java.math.RoundingMode.UNNECESSARY;
import static java.util.Comparator.comparing;
import static java.util.Objects.requireNonNull;
-import static org.apache.iceberg.BaseMetastoreTableOperations.ICEBERG_TABLE_TYPE_VALUE;
-import static org.apache.iceberg.BaseMetastoreTableOperations.TABLE_TYPE_PROP;
import static org.apache.iceberg.LocationProviders.locationsFor;
import static org.apache.iceberg.MetadataTableUtils.createMetadataTableInstance;
import static org.apache.iceberg.TableProperties.DEFAULT_FILE_FORMAT;
@@ -180,11 +178,6 @@ public final class IcebergUtil

private IcebergUtil() {}

-public static boolean isIcebergTable(io.trino.plugin.hive.metastore.Table table)
-{
-return ICEBERG_TABLE_TYPE_VALUE.equalsIgnoreCase(table.getParameters().get(TABLE_TYPE_PROP));
-}
-
public static Table loadIcebergTable(TrinoCatalog catalog, IcebergTableOperationsProvider tableOperationsProvider, ConnectorSession session, SchemaTableName table)
{
TableOperations operations = tableOperationsProvider.createTableOperations(
========================================
@@ -136,5 +136,5 @@ void createMaterializedView(

void updateColumnComment(ConnectorSession session, SchemaTableName schemaTableName, ColumnIdentity columnIdentity, Optional<String> comment);

-Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName);
+Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName, String hiveCatalogName);
}
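
This interface change is the hinge of the refactoring: implementations no longer look up the target catalog from session properties themselves; the caller resolves getHiveCatalogName(session) once and skips the call entirely when the property is unset. A minimal sketch of the caller/callee split after the change, using hypothetical stand-in types rather than Trino's:

import java.util.Optional;

final class RedirectRefactorSketch
{
    record Session(Optional<String> hiveCatalogName) {} // stand-in for ConnectorSession + session property

    interface Catalog
    {
        // implementations receive the already-resolved catalog name
        Optional<String> redirectTable(Session session, String tableName, String hiveCatalogName);
    }

    // Caller-side shape after the change, mirroring IcebergMetadata.redirectTable
    static Optional<String> redirect(Catalog catalog, Session session, String tableName)
    {
        Optional<String> targetCatalogName = session.hiveCatalogName();
        if (targetCatalogName.isEmpty()) {
            return Optional.empty(); // no redirect target configured: skip the catalog call entirely
        }
        return catalog.redirectTable(session, tableName, targetCatalogName.get());
    }

    public static void main(String[] args)
    {
        // A toy implementation that redirects every table it is asked about
        Catalog catalog = (session, tableName, hiveCatalogName) ->
                Optional.of(hiveCatalogName + "." + tableName);
        System.out.println(redirect(catalog, new Session(Optional.of("hive")), "orders")); // Optional[hive.orders]
        System.out.println(redirect(catalog, new Session(Optional.empty()), "orders"));    // Optional.empty
    }
}
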
========================================
@@ -102,7 +102,6 @@
import static io.trino.plugin.iceberg.IcebergMaterializedViewDefinition.encodeMaterializedViewData;
import static io.trino.plugin.iceberg.IcebergMaterializedViewDefinition.fromConnectorMaterializedViewDefinition;
import static io.trino.plugin.iceberg.IcebergSchemaProperties.LOCATION_PROPERTY;
-import static io.trino.plugin.iceberg.IcebergSessionProperties.getHiveCatalogName;
import static io.trino.plugin.iceberg.IcebergUtil.getIcebergTableWithMetadata;
import static io.trino.plugin.iceberg.IcebergUtil.quotedTableName;
import static io.trino.plugin.iceberg.IcebergUtil.validateTableCanBeDropped;
@@ -1019,14 +1018,12 @@ public void renameMaterializedView(ConnectorSession session, SchemaTableName sou
}

@Override
-public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName)
+public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName, String hiveCatalogName)
{
requireNonNull(session, "session is null");
requireNonNull(tableName, "tableName is null");
-Optional<String> targetCatalogName = getHiveCatalogName(session);
-if (targetCatalogName.isEmpty()) {
-return Optional.empty();
-}
+requireNonNull(hiveCatalogName, "hiveCatalogName is null");
+
if (isHiveSystemSchema(tableName.getSchemaName())) {
return Optional.empty();
}
@@ -1044,7 +1041,7 @@ public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session,
}
if (!isIcebergTable(getTableParameters(table.get()))) {
// After redirecting, use the original table name, with "$partitions" and similar suffixes
-return targetCatalogName.map(catalog -> new CatalogSchemaTableName(catalog, tableName));
+return Optional.of(new CatalogSchemaTableName(hiveCatalogName, tableName));
}
return Optional.empty();
}
========================================
@@ -36,8 +36,8 @@
import static io.trino.plugin.hive.ViewReaderUtil.isHiveOrPrestoView;
import static io.trino.plugin.hive.ViewReaderUtil.isPrestoView;
import static io.trino.plugin.hive.metastore.PrincipalPrivileges.NO_PRIVILEGES;
+import static io.trino.plugin.hive.util.HiveUtil.isIcebergTable;
import static io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_INVALID_METADATA;
-import static io.trino.plugin.iceberg.IcebergUtil.isIcebergTable;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
========================================
@@ -75,13 +75,12 @@
import static io.trino.plugin.hive.metastore.PrincipalPrivileges.NO_PRIVILEGES;
import static io.trino.plugin.hive.metastore.StorageFormat.VIEW_STORAGE_FORMAT;
import static io.trino.plugin.hive.util.HiveUtil.isHiveSystemSchema;
+import static io.trino.plugin.hive.util.HiveUtil.isIcebergTable;
import static io.trino.plugin.iceberg.IcebergMaterializedViewAdditionalProperties.STORAGE_SCHEMA;
import static io.trino.plugin.iceberg.IcebergMaterializedViewDefinition.encodeMaterializedViewData;
import static io.trino.plugin.iceberg.IcebergMaterializedViewDefinition.fromConnectorMaterializedViewDefinition;
import static io.trino.plugin.iceberg.IcebergSchemaProperties.LOCATION_PROPERTY;
-import static io.trino.plugin.iceberg.IcebergSessionProperties.getHiveCatalogName;
import static io.trino.plugin.iceberg.IcebergUtil.getIcebergTableWithMetadata;
-import static io.trino.plugin.iceberg.IcebergUtil.isIcebergTable;
import static io.trino.plugin.iceberg.IcebergUtil.loadIcebergTable;
import static io.trino.plugin.iceberg.IcebergUtil.validateTableCanBeDropped;
import static io.trino.plugin.iceberg.catalog.AbstractIcebergTableOperations.ICEBERG_METASTORE_STORAGE_FORMAT;
@@ -603,14 +602,12 @@ private List<String> listNamespaces(ConnectorSession session, Optional<String> n
}

@Override
-public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName)
+public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName, String hiveCatalogName)
{
requireNonNull(session, "session is null");
requireNonNull(tableName, "tableName is null");
-Optional<String> targetCatalogName = getHiveCatalogName(session);
-if (targetCatalogName.isEmpty()) {
-return Optional.empty();
-}
+requireNonNull(hiveCatalogName, "hiveCatalogName is null");
+
if (isHiveSystemSchema(tableName.getSchemaName())) {
return Optional.empty();
}
@@ -628,7 +625,7 @@ public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session,
}
if (!isIcebergTable(table.get())) {
// After redirecting, use the original table name, with "$partitions" and similar suffixes
-return targetCatalogName.map(catalog -> new CatalogSchemaTableName(catalog, tableName));
+return Optional.of(new CatalogSchemaTableName(hiveCatalogName, tableName));
}
return Optional.empty();
}
========================================
@@ -380,7 +380,7 @@ public void renameMaterializedView(ConnectorSession session, SchemaTableName sou
}

@Override
-public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName)
+public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName, String hiveCatalogName)
{
return Optional.empty();
}
========================================
@@ -349,7 +349,7 @@ public void renameMaterializedView(ConnectorSession session, SchemaTableName sou
}

@Override
-public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName)
+public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName, String hiveCatalogName)
{
return Optional.empty();
}
========================================
@@ -390,7 +390,7 @@ public void updateColumnComment(ConnectorSession session, SchemaTableName schema
}

@Override
-public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName)
+public Optional<CatalogSchemaTableName> redirectTable(ConnectorSession session, SchemaTableName tableName, String hiveCatalogName)
{
return Optional.empty();
}
========================================
@@ -90,9 +90,9 @@
import static io.trino.plugin.hive.metastore.PrincipalPrivileges.NO_PRIVILEGES;
import static io.trino.plugin.hive.util.HiveUtil.isDeltaLakeTable;
import static io.trino.plugin.hive.util.HiveUtil.isHudiTable;
+import static io.trino.plugin.hive.util.HiveUtil.isIcebergTable;
import static io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_COMMIT_ERROR;
import static io.trino.plugin.iceberg.IcebergSecurityConfig.IcebergSecurity.SYSTEM;
-import static io.trino.plugin.iceberg.IcebergUtil.isIcebergTable;
import static io.trino.plugin.iceberg.PartitionFields.parsePartitionFields;
import static io.trino.plugin.iceberg.TypeConverter.toIcebergTypeForNewColumn;
import static io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT;