Remove redundant Optional use from HiveTableHandle.projectedColumns
findepi committed Nov 25, 2021
1 parent d99d2a6 commit e553381
Showing 4 changed files with 14 additions and 15 deletions.
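The change in one line: Optional<Set<ColumnHandle>> becomes a plain Set<ColumnHandle>, with the empty set alone meaning "no projection applied". A condensed before/after sketch of the call-site effect, lifted from the first diff below (handle abbreviates hiveTableHandle):

    // Before: absence was modeled twice, so callers unwrapped the Optional first.
    if (handle.getProjectedColumns().isPresent()
            && handle.getProjectedColumns().get().equals(projectedColumns)) {
        return Optional.empty();
    }

    // After: plain set equality; a fresh handle holds ImmutableSet.of(),
    // which never equals a non-empty projection.
    if (handle.getProjectedColumns().equals(projectedColumns)) {
        return Optional.empty();
    }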
File 1 of 4:

@@ -2576,8 +2576,7 @@ public Optional<ProjectionApplicationResult<ConnectorTableHandle>> applyProjection(
         // all references are simple variables
         if (columnProjections.values().stream().allMatch(ProjectedColumnRepresentation::isVariable)) {
             Set<ColumnHandle> projectedColumns = ImmutableSet.copyOf(assignments.values());
-            if (hiveTableHandle.getProjectedColumns().isPresent()
-                    && hiveTableHandle.getProjectedColumns().get().equals(projectedColumns)) {
+            if (hiveTableHandle.getProjectedColumns().equals(projectedColumns)) {
                 return Optional.empty();
             }
             List<Assignment> assignmentsList = assignments.entrySet().stream()
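The simplified check above is safe because Optional and Set both compare by value; only the wrapper disappeared. A self-contained illustration of the equality semantics (String stands in for ColumnHandle; Guava's ImmutableSet as in the diff):

    import com.google.common.collect.ImmutableSet;

    import java.util.Optional;
    import java.util.Set;

    public class ProjectionEqualityDemo
    {
        public static void main(String[] args)
        {
            Set<String> projected = ImmutableSet.of("a", "b");

            // Old shape: presence check, then unwrapped comparison.
            Optional<Set<String>> before = Optional.of(ImmutableSet.of("a", "b"));
            System.out.println(before.isPresent() && before.get().equals(projected)); // true

            // New shape: Set.equals is already value-based, so one call suffices.
            Set<String> after = ImmutableSet.of("a", "b");
            System.out.println(after.equals(projected)); // true

            // The empty-set default never matches a real projection.
            System.out.println(ImmutableSet.of().equals(projected)); // false
        }
    }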
@@ -2777,7 +2776,7 @@ public ConnectorTableHandle makeCompatiblePartitioning(ConnectorSession session,
                 hiveTable.getAnalyzePartitionValues(),
                 hiveTable.getAnalyzeColumnNames(),
                 ImmutableSet.of(),
-                Optional.empty(), // Projected columns is used only during optimization phase of planning
+                ImmutableSet.of(), // Projected columns is used only during optimization phase of planning
                 hiveTable.getTransaction(),
                 hiveTable.isRecordScannedFiles(),
                 hiveTable.getMaxScannedFileSize());
File 2 of 4:

@@ -54,7 +54,7 @@ public class HiveTableHandle
     private final Optional<List<List<String>>> analyzePartitionValues;
     private final Optional<Set<String>> analyzeColumnNames;
     private final Set<ColumnHandle> constraintColumns;
-    private final Optional<Set<ColumnHandle>> projectedColumns;
+    private final Set<ColumnHandle> projectedColumns;
     private final AcidTransaction transaction;
     private final boolean recordScannedFiles;
     private final Optional<Long> maxScannedFileSize;
@@ -87,7 +87,7 @@ public HiveTableHandle(
                 analyzePartitionValues,
                 analyzeColumnNames,
                 ImmutableSet.of(),
-                Optional.empty(),
+                ImmutableSet.of(),
                 transaction,
                 false,
                 Optional.empty());
@@ -115,7 +115,7 @@ public HiveTableHandle(
                 Optional.empty(),
                 Optional.empty(),
                 ImmutableSet.of(),
-                Optional.empty(),
+                ImmutableSet.of(),
                 NO_ACID_TRANSACTION,
                 false,
                 Optional.empty());
@@ -135,7 +135,7 @@ public HiveTableHandle(
             Optional<List<List<String>>> analyzePartitionValues,
             Optional<Set<String>> analyzeColumnNames,
             Set<ColumnHandle> constraintColumns,
-            Optional<Set<ColumnHandle>> projectedColumns,
+            Set<ColumnHandle> projectedColumns,
             AcidTransaction transaction,
             boolean recordScannedFiles,
             Optional<Long> maxSplitFileSize)
@@ -153,7 +153,7 @@ public HiveTableHandle(
         this.analyzePartitionValues = requireNonNull(analyzePartitionValues, "analyzePartitionValues is null").map(ImmutableList::copyOf);
         this.analyzeColumnNames = requireNonNull(analyzeColumnNames, "analyzeColumnNames is null").map(ImmutableSet::copyOf);
         this.constraintColumns = ImmutableSet.copyOf(requireNonNull(constraintColumns, "constraintColumns is null"));
-        this.projectedColumns = requireNonNull(projectedColumns, "projectedColumns is null").map(ImmutableSet::copyOf);
+        this.projectedColumns = ImmutableSet.copyOf(requireNonNull(projectedColumns, "projectedColumns is null"));
         this.transaction = requireNonNull(transaction, "transaction is null");
         this.recordScannedFiles = recordScannedFiles;
         this.maxScannedFileSize = requireNonNull(maxSplitFileSize, "maxSplitFileSize is null");
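The constructor now uses the direct defensive-copy idiom instead of mapping over the Optional. A minimal sketch of that idiom outside Trino (String stands in for ColumnHandle):

    import static java.util.Objects.requireNonNull;

    import com.google.common.collect.ImmutableSet;

    import java.util.Set;

    class Handle
    {
        private final Set<String> columns;

        Handle(Set<String> columns)
        {
            // Null-check first, then snapshot; ImmutableSet.copyOf returns the
            // input unchanged when it is already an ImmutableSet, so the common
            // path costs no extra allocation.
            this.columns = ImmutableSet.copyOf(requireNonNull(columns, "columns is null"));
        }

        Set<String> columns()
        {
            return columns;
        }
    }

Because of the snapshot, mutating the caller's set after construction cannot change what the handle observed.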
@@ -264,7 +264,7 @@ public HiveTableHandle withProjectedColumns(Set<ColumnHandle> projectedColumns)
                 analyzePartitionValues,
                 analyzeColumnNames,
                 constraintColumns,
-                Optional.of(projectedColumns),
+                projectedColumns,
                 transaction,
                 recordScannedFiles,
                 maxScannedFileSize);
@@ -403,7 +403,7 @@ public Set<ColumnHandle> getConstraintColumns()
 
     // do not serialize projected columns as they are not needed on workers
     @JsonIgnore
-    public Optional<Set<ColumnHandle>> getProjectedColumns()
+    public Set<ColumnHandle> getProjectedColumns()
     {
         return projectedColumns;
     }
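The @JsonIgnore above is unaffected by the type change: the getter stays out of the serialized handle either way, so projected columns remain coordinator-local. A self-contained sketch of that Jackson behavior (JsonIgnoreDemo is a made-up class, not Trino code):

    import com.fasterxml.jackson.annotation.JsonIgnore;
    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.util.Set;

    public class JsonIgnoreDemo
    {
        public static class Example
        {
            @JsonProperty
            public String getTableName()
            {
                return "orders";
            }

            // Omitted from the wire format, mirroring how projectedColumns
            // never ships to workers.
            @JsonIgnore
            public Set<String> getProjectedColumns()
            {
                return Set.of("a", "b");
            }
        }

        public static void main(String[] args)
                throws Exception
        {
            // Prints {"tableName":"orders"} -- no projectedColumns entry.
            System.out.println(new ObjectMapper().writeValueAsString(new Example()));
        }
    }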
File 3 of 4:

@@ -3494,7 +3494,7 @@ private static void assertProjectionResult(Optional<ProjectionApplicationResult<
 
         assertEquals(actualAssignments.size(), expectedAssignments.size());
         assertEquals(
-                Optional.of(actualAssignments.values().stream().map(Assignment::getColumn).collect(toImmutableSet())),
+                actualAssignments.values().stream().map(Assignment::getColumn).collect(toImmutableSet()),
                 ((HiveTableHandle) result.getHandle()).getProjectedColumns());
     }
 
File 4 of 4:

@@ -175,8 +175,8 @@ public void testDereferencePushdown()
                         HiveTableHandle hiveTableHandle = (HiveTableHandle) table;
                         return hiveTableHandle.getCompactEffectivePredicate().equals(TupleDomain.withColumnDomains(
                                 ImmutableMap.of(columnY, Domain.singleValue(BIGINT, 2L)))) &&
-                                hiveTableHandle.getProjectedColumns().equals(Optional.of(
-                                        ImmutableSet.of(column1Handle, columnX, columnY)));
+                                hiveTableHandle.getProjectedColumns().equals(
+                                        ImmutableSet.of(column1Handle, columnX, columnY));
                     },
                     TupleDomain.all(),
                     ImmutableMap.of("col0_y", columnY::equals, "col0_x", columnX::equals, "col1", column1Handle::equals)))));
@@ -192,8 +192,8 @@ public void testDereferencePushdown()
                         HiveTableHandle hiveTableHandle = (HiveTableHandle) table;
                         return hiveTableHandle.getCompactEffectivePredicate().equals(TupleDomain.withColumnDomains(
                                 ImmutableMap.of(columnX, Domain.singleValue(BIGINT, 5L)))) &&
-                                hiveTableHandle.getProjectedColumns().equals(Optional.of(
-                                        ImmutableSet.of(column0Handle, columnX)));
+                                hiveTableHandle.getProjectedColumns().equals(
+                                        ImmutableSet.of(column0Handle, columnX));
                     },
                     TupleDomain.all(),
                     ImmutableMap.of("col0", column0Handle::equals, "col0_x", columnX::equals)))));
