From e5533812c3cd61ee8a9cc3d202413878eebbdf06 Mon Sep 17 00:00:00 2001
From: Piotr Findeisen
Date: Tue, 23 Nov 2021 14:11:23 +0100
Subject: [PATCH] Remove redundant Optional use from HiveTableHandle.projectedColumns

---
 .../java/io/trino/plugin/hive/HiveMetadata.java    |  5 ++---
 .../java/io/trino/plugin/hive/HiveTableHandle.java | 14 +++++++-------
 .../io/trino/plugin/hive/AbstractTestHive.java     |  2 +-
 .../TestHiveProjectionPushdownIntoTableScan.java   |  8 ++++----
 4 files changed, 14 insertions(+), 15 deletions(-)

diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
index 1ff436fdecdc..489716b06db0 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
@@ -2576,8 +2576,7 @@ public Optional<ProjectionApplicationResult<ConnectorTableHandle>> applyProjecti
         // all references are simple variables
         if (columnProjections.values().stream().allMatch(ProjectedColumnRepresentation::isVariable)) {
             Set<ColumnHandle> projectedColumns = ImmutableSet.copyOf(assignments.values());
-            if (hiveTableHandle.getProjectedColumns().isPresent()
-                    && hiveTableHandle.getProjectedColumns().get().equals(projectedColumns)) {
+            if (hiveTableHandle.getProjectedColumns().equals(projectedColumns)) {
                 return Optional.empty();
             }
             List<Assignment> assignmentsList = assignments.entrySet().stream()
@@ -2777,7 +2776,7 @@ public ConnectorTableHandle makeCompatiblePartitioning(ConnectorSession session,
                 hiveTable.getAnalyzePartitionValues(),
                 hiveTable.getAnalyzeColumnNames(),
                 ImmutableSet.of(),
-                Optional.empty(), // Projected columns is used only during optimization phase of planning
+                ImmutableSet.of(), // Projected columns is used only during optimization phase of planning
                 hiveTable.getTransaction(),
                 hiveTable.isRecordScannedFiles(),
                 hiveTable.getMaxScannedFileSize());
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java
index ad3a392ea33d..153eb7d0325f 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java
@@ -54,7 +54,7 @@ public class HiveTableHandle
     private final Optional<List<List<String>>> analyzePartitionValues;
     private final Optional<Set<String>> analyzeColumnNames;
     private final Set<HiveColumnHandle> constraintColumns;
-    private final Optional<Set<ColumnHandle>> projectedColumns;
+    private final Set<ColumnHandle> projectedColumns;
     private final AcidTransaction transaction;
     private final boolean recordScannedFiles;
     private final Optional<DataSize> maxScannedFileSize;
@@ -87,7 +87,7 @@ public HiveTableHandle(
                 analyzePartitionValues,
                 analyzeColumnNames,
                 ImmutableSet.of(),
-                Optional.empty(),
+                ImmutableSet.of(),
                 transaction,
                 false,
                 Optional.empty());
@@ -115,7 +115,7 @@ public HiveTableHandle(
                 Optional.empty(),
                 Optional.empty(),
                 ImmutableSet.of(),
-                Optional.empty(),
+                ImmutableSet.of(),
                 NO_ACID_TRANSACTION,
                 false,
                 Optional.empty());
@@ -135,7 +135,7 @@ public HiveTableHandle(
             Optional<List<List<String>>> analyzePartitionValues,
             Optional<Set<String>> analyzeColumnNames,
             Set<HiveColumnHandle> constraintColumns,
-            Optional<Set<ColumnHandle>> projectedColumns,
+            Set<ColumnHandle> projectedColumns,
             AcidTransaction transaction,
             boolean recordScannedFiles,
             Optional<DataSize> maxSplitFileSize)
@@ -153,7 +153,7 @@ public HiveTableHandle(
         this.analyzePartitionValues = requireNonNull(analyzePartitionValues, "analyzePartitionValues is null").map(ImmutableList::copyOf);
         this.analyzeColumnNames = requireNonNull(analyzeColumnNames, "analyzeColumnNames is null").map(ImmutableSet::copyOf);
         this.constraintColumns = ImmutableSet.copyOf(requireNonNull(constraintColumns, "constraintColumns is null"));
-        this.projectedColumns = requireNonNull(projectedColumns, "projectedColumns is null").map(ImmutableSet::copyOf);
+        this.projectedColumns = ImmutableSet.copyOf(requireNonNull(projectedColumns, "projectedColumns is null"));
         this.transaction = requireNonNull(transaction, "transaction is null");
         this.recordScannedFiles = recordScannedFiles;
         this.maxScannedFileSize = requireNonNull(maxSplitFileSize, "maxSplitFileSize is null");
@@ -264,7 +264,7 @@ public HiveTableHandle withProjectedColumns(Set<ColumnHandle> projectedColumns)
                 analyzePartitionValues,
                 analyzeColumnNames,
                 constraintColumns,
-                Optional.of(projectedColumns),
+                projectedColumns,
                 transaction,
                 recordScannedFiles,
                 maxScannedFileSize);
@@ -403,7 +403,7 @@ public Set<HiveColumnHandle> getConstraintColumns()
 
     // do not serialize projected columns as they are not needed on workers
     @JsonIgnore
-    public Optional<Set<ColumnHandle>> getProjectedColumns()
+    public Set<ColumnHandle> getProjectedColumns()
     {
         return projectedColumns;
     }
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java
index 2c9085021fc8..5b4e6f081df3 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java
@@ -3494,7 +3494,7 @@ private static void assertProjectionResult(Optional