From d99d2a6734134320a1b6a1e8932ef553aae7706f Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Tue, 23 Nov 2021 14:00:34 +0100 Subject: [PATCH] Remove redundant Optional use from HiveTableHandle.constraintColumns `Optional.empty()` and `Optional.of(Set.of())` had precisely the same meaning, and so `Optional` use was unnecessary. --- .../io/trino/plugin/hive/HiveMetadata.java | 6 ++-- .../plugin/hive/HivePartitionManager.java | 4 +-- .../io/trino/plugin/hive/HiveTableHandle.java | 12 +++---- .../io/trino/plugin/hive/util/Optionals.java | 35 ------------------- 4 files changed, 11 insertions(+), 46 deletions(-) delete mode 100644 plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/Optionals.java diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java index 66b96b17f074..1ff436fdecdc 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java @@ -2539,8 +2539,8 @@ public void validateScan(ConnectorSession session, ConnectorTableHandle tableHan if (isQueryPartitionFilterRequiredForTable(session, handle.getSchemaTableName()) && handle.getAnalyzePartitionValues().isEmpty() && handle.getEnforcedConstraint().isAll()) { List partitionColumns = handle.getPartitionColumns(); if (!partitionColumns.isEmpty()) { - Optional> referencedColumns = handle.getConstraintColumns(); - if (referencedColumns.isEmpty() || Collections.disjoint(referencedColumns.get(), partitionColumns)) { + Set referencedColumns = handle.getConstraintColumns(); + if (Collections.disjoint(referencedColumns, partitionColumns)) { String partitionColumnNames = partitionColumns.stream() .map(HiveColumnHandle::getName) .collect(joining(", ")); @@ -2776,7 +2776,7 @@ public ConnectorTableHandle makeCompatiblePartitioning(ConnectorSession session, hiveTable.getBucketFilter(), 
hiveTable.getAnalyzePartitionValues(), hiveTable.getAnalyzeColumnNames(), - Optional.empty(), + ImmutableSet.of(), Optional.empty(), // Projected columns is used only during optimization phase of planning hiveTable.getTransaction(), hiveTable.isRecordScannedFiles(), diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java index beb34811f4ce..e80257dd955f 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java @@ -16,11 +16,11 @@ import com.google.common.base.VerifyException; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.trino.plugin.hive.authentication.HiveIdentity; import io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore; import io.trino.plugin.hive.util.HiveBucketing.HiveBucketFilter; -import io.trino.plugin.hive.util.Optionals; import io.trino.spi.TrinoException; import io.trino.spi.connector.ColumnHandle; import io.trino.spi.connector.ConnectorTableHandle; @@ -191,7 +191,7 @@ public HiveTableHandle applyPartitionResult(HiveTableHandle handle, HivePartitio partitions.getBucketFilter(), handle.getAnalyzePartitionValues(), handle.getAnalyzeColumnNames(), - Optionals.combine(handle.getConstraintColumns(), columns, Sets::union), + Sets.union(handle.getConstraintColumns(), columns.orElseGet(ImmutableSet::of)), handle.getProjectedColumns(), handle.getTransaction(), handle.isRecordScannedFiles(), diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java index 8090c0337d4e..ad3a392ea33d 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java +++ 
b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java @@ -53,7 +53,7 @@ public class HiveTableHandle private final Optional bucketFilter; private final Optional>> analyzePartitionValues; private final Optional> analyzeColumnNames; - private final Optional> constraintColumns; + private final Set constraintColumns; private final Optional> projectedColumns; private final AcidTransaction transaction; private final boolean recordScannedFiles; @@ -86,7 +86,7 @@ public HiveTableHandle( bucketFilter, analyzePartitionValues, analyzeColumnNames, - Optional.empty(), + ImmutableSet.of(), Optional.empty(), transaction, false, @@ -114,7 +114,7 @@ public HiveTableHandle( Optional.empty(), Optional.empty(), Optional.empty(), - Optional.empty(), + ImmutableSet.of(), Optional.empty(), NO_ACID_TRANSACTION, false, @@ -134,7 +134,7 @@ public HiveTableHandle( Optional bucketFilter, Optional>> analyzePartitionValues, Optional> analyzeColumnNames, - Optional> constraintColumns, + Set constraintColumns, Optional> projectedColumns, AcidTransaction transaction, boolean recordScannedFiles, @@ -152,7 +152,7 @@ public HiveTableHandle( this.bucketFilter = requireNonNull(bucketFilter, "bucketFilter is null"); this.analyzePartitionValues = requireNonNull(analyzePartitionValues, "analyzePartitionValues is null").map(ImmutableList::copyOf); this.analyzeColumnNames = requireNonNull(analyzeColumnNames, "analyzeColumnNames is null").map(ImmutableSet::copyOf); - this.constraintColumns = requireNonNull(constraintColumns, "constraintColumns is null").map(ImmutableSet::copyOf); + this.constraintColumns = ImmutableSet.copyOf(requireNonNull(constraintColumns, "constraintColumns is null")); this.projectedColumns = requireNonNull(projectedColumns, "projectedColumns is null").map(ImmutableSet::copyOf); this.transaction = requireNonNull(transaction, "transaction is null"); this.recordScannedFiles = recordScannedFiles; @@ -396,7 +396,7 @@ public AcidTransaction getTransaction() // do not 
serialize constraint columns as they are not needed on workers @JsonIgnore - public Optional> getConstraintColumns() + public Set getConstraintColumns() { return constraintColumns; } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/Optionals.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/Optionals.java deleted file mode 100644 index 3c6c77cecc8c..000000000000 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/Optionals.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package io.trino.plugin.hive.util; - -import java.util.Optional; -import java.util.function.BinaryOperator; - -public final class Optionals -{ - private Optionals() {} - - public static Optional combine(Optional left, Optional right, BinaryOperator combiner) - { - if (left.isPresent() && right.isPresent()) { - return Optional.of(combiner.apply(left.get(), right.get())); - } - else if (left.isPresent()) { - return left; - } - else { - return right; - } - } -}