
Remove redundant Optional use from HiveTableHandle.constraintColumns
`Optional.empty()` and `Optional.of(Set.of())` had precisely the same
meaning, and so the use of `Optional` was unnecessary.
findepi committed Nov 25, 2021
1 parent d66b0f9 commit d99d2a6
Showing 4 changed files with 11 additions and 46 deletions.
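
Why the two spellings were redundant: the handle only ever asks whether a given column is among the constraint columns, and an absent set and an empty set answer that question identically. A minimal sketch of the before/after representations (plain `String` columns stand in for Trino's `ColumnHandle`):

import java.util.Optional;
import java.util.Set;

public class RedundantOptionalDemo
{
    public static void main(String[] args)
    {
        // Before: two distinct representations of "no constraint columns"...
        Optional<Set<String>> absent = Optional.empty();
        Optional<Set<String>> empty = Optional.of(Set.of());
        // ...that behave identically for the only question ever asked:
        System.out.println(absent.map(s -> s.contains("ds")).orElse(false)); // false
        System.out.println(empty.map(s -> s.contains("ds")).orElse(false)); // false
        // After: a single representation, a plain (possibly empty) set.
        Set<String> constraintColumns = Set.of();
        System.out.println(constraintColumns.contains("ds")); // false
    }
}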
File 1 of 4:
@@ -2539,8 +2539,8 @@ public void validateScan(ConnectorSession session, ConnectorTableHandle tableHandle)
         if (isQueryPartitionFilterRequiredForTable(session, handle.getSchemaTableName()) && handle.getAnalyzePartitionValues().isEmpty() && handle.getEnforcedConstraint().isAll()) {
             List<HiveColumnHandle> partitionColumns = handle.getPartitionColumns();
             if (!partitionColumns.isEmpty()) {
-                Optional<Set<ColumnHandle>> referencedColumns = handle.getConstraintColumns();
-                if (referencedColumns.isEmpty() || Collections.disjoint(referencedColumns.get(), partitionColumns)) {
+                Set<ColumnHandle> referencedColumns = handle.getConstraintColumns();
+                if (Collections.disjoint(referencedColumns, partitionColumns)) {
                     String partitionColumnNames = partitionColumns.stream()
                             .map(HiveColumnHandle::getName)
                             .collect(joining(", "));
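
The two-clause guard collapses to a single call because `Collections.disjoint` is vacuously true when either collection is empty, so the removed `referencedColumns.isEmpty()` branch was already covered. A quick JDK-only check of that property:

import java.util.Collections;
import java.util.List;
import java.util.Set;

public class DisjointDemo
{
    public static void main(String[] args)
    {
        List<String> partitionColumns = List.of("ds", "region");
        // An empty referenced-column set is disjoint from everything,
        // which is exactly what the removed isEmpty() branch expressed.
        System.out.println(Collections.disjoint(Set.of(), partitionColumns)); // true
        // A referenced partition column defeats the disjointness check.
        System.out.println(Collections.disjoint(Set.of("ds"), partitionColumns)); // false
    }
}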
@@ -2776,7 +2776,7 @@ public ConnectorTableHandle makeCompatiblePartitioning(ConnectorSession session,
                 hiveTable.getBucketFilter(),
                 hiveTable.getAnalyzePartitionValues(),
                 hiveTable.getAnalyzeColumnNames(),
-                Optional.empty(),
+                ImmutableSet.of(),
                 Optional.empty(), // Projected columns is used only during optimization phase of planning
                 hiveTable.getTransaction(),
                 hiveTable.isRecordScannedFiles(),
File 2 of 4:
@@ -16,11 +16,11 @@
 import com.google.common.base.VerifyException;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Sets;
 import io.trino.plugin.hive.authentication.HiveIdentity;
 import io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore;
 import io.trino.plugin.hive.util.HiveBucketing.HiveBucketFilter;
-import io.trino.plugin.hive.util.Optionals;
 import io.trino.spi.TrinoException;
 import io.trino.spi.connector.ColumnHandle;
 import io.trino.spi.connector.ConnectorTableHandle;
@@ -191,7 +191,7 @@ public HiveTableHandle applyPartitionResult(HiveTableHandle handle, HivePartitio
                 partitions.getBucketFilter(),
                 handle.getAnalyzePartitionValues(),
                 handle.getAnalyzeColumnNames(),
-                Optionals.combine(handle.getConstraintColumns(), columns, Sets::union),
+                Sets.union(handle.getConstraintColumns(), columns.orElseGet(ImmutableSet::of)),
                 handle.getProjectedColumns(),
                 handle.getTransaction(),
                 handle.isRecordScannedFiles(),
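
Note that Guava's `Sets.union` returns an unmodifiable live view rather than a materialized set; the `HiveTableHandle` constructor (file 3 below) immediately snapshots it with `ImmutableSet.copyOf`. A small illustration of that view semantics:

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Set;

public class UnionViewDemo
{
    public static void main(String[] args)
    {
        Set<String> existing = ImmutableSet.of("ds");
        Set<String> added = ImmutableSet.of("region");
        Set<String> union = Sets.union(existing, added); // lazy view, no copy yet
        System.out.println(union); // [ds, region]
        System.out.println(ImmutableSet.copyOf(union)); // materialized snapshot
    }
}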
File 3 of 4:
@@ -53,7 +53,7 @@ public class HiveTableHandle
     private final Optional<HiveBucketFilter> bucketFilter;
     private final Optional<List<List<String>>> analyzePartitionValues;
     private final Optional<Set<String>> analyzeColumnNames;
-    private final Optional<Set<ColumnHandle>> constraintColumns;
+    private final Set<ColumnHandle> constraintColumns;
     private final Optional<Set<ColumnHandle>> projectedColumns;
     private final AcidTransaction transaction;
     private final boolean recordScannedFiles;
@@ -86,7 +86,7 @@ public HiveTableHandle(
                 bucketFilter,
                 analyzePartitionValues,
                 analyzeColumnNames,
-                Optional.empty(),
+                ImmutableSet.of(),
                 Optional.empty(),
                 transaction,
                 false,
@@ -114,7 +114,7 @@ public HiveTableHandle(
                 Optional.empty(),
                 Optional.empty(),
                 Optional.empty(),
-                Optional.empty(),
+                ImmutableSet.of(),
                 Optional.empty(),
                 NO_ACID_TRANSACTION,
                 false,
@@ -134,7 +134,7 @@ public HiveTableHandle(
            Optional<HiveBucketFilter> bucketFilter,
            Optional<List<List<String>>> analyzePartitionValues,
            Optional<Set<String>> analyzeColumnNames,
-           Optional<Set<ColumnHandle>> constraintColumns,
+           Set<ColumnHandle> constraintColumns,
            Optional<Set<ColumnHandle>> projectedColumns,
            AcidTransaction transaction,
            boolean recordScannedFiles,
@@ -152,7 +152,7 @@
         this.bucketFilter = requireNonNull(bucketFilter, "bucketFilter is null");
         this.analyzePartitionValues = requireNonNull(analyzePartitionValues, "analyzePartitionValues is null").map(ImmutableList::copyOf);
         this.analyzeColumnNames = requireNonNull(analyzeColumnNames, "analyzeColumnNames is null").map(ImmutableSet::copyOf);
-        this.constraintColumns = requireNonNull(constraintColumns, "constraintColumns is null").map(ImmutableSet::copyOf);
+        this.constraintColumns = ImmutableSet.copyOf(requireNonNull(constraintColumns, "constraintColumns is null"));
         this.projectedColumns = requireNonNull(projectedColumns, "projectedColumns is null").map(ImmutableSet::copyOf);
         this.transaction = requireNonNull(transaction, "transaction is null");
         this.recordScannedFiles = recordScannedFiles;
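
The defensive copy stays cheap, too: Guava documents that `ImmutableSet.copyOf` avoids copying when its argument is already an `ImmutableSet`, so re-wrapping an already-copied field costs nothing. For example:

import com.google.common.collect.ImmutableSet;

public class CopyOfDemo
{
    public static void main(String[] args)
    {
        ImmutableSet<String> columns = ImmutableSet.of("ds");
        // Guava short-circuits here and returns the same instance
        // rather than allocating a new set.
        System.out.println(ImmutableSet.copyOf(columns) == columns); // expected: true
    }
}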
@@ -396,7 +396,7 @@ public AcidTransaction getTransaction()
 
     // do not serialize constraint columns as they are not needed on workers
     @JsonIgnore
-    public Optional<Set<ColumnHandle>> getConstraintColumns()
+    public Set<ColumnHandle> getConstraintColumns()
     {
         return constraintColumns;
     }

File 4 of 4:
This file was deleted.
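
The deleted file is the `io.trino.plugin.hive.util.Optionals` helper whose import is removed in file 2 above; its body is not shown in this view. Judging only from the removed call site, it presumably looked something like this hypothetical reconstruction:

import java.util.Optional;
import java.util.function.BinaryOperator;

// Hypothetical reconstruction, inferred solely from the single call site
// Optionals.combine(handle.getConstraintColumns(), columns, Sets::union);
// the actual deleted source is not visible in this diff.
public final class Optionals
{
    private Optionals() {}

    public static <T> Optional<T> combine(Optional<T> left, Optional<T> right, BinaryOperator<T> combiner)
    {
        if (left.isPresent() && right.isPresent()) {
            return Optional.of(combiner.apply(left.get(), right.get()));
        }
        return left.isPresent() ? left : right;
    }
}

Once constraint columns are a plain, possibly empty set, this whole helper reduces to a direct `Sets.union` call, which is why the file could be dropped.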
