Commit

Merge branch 'apache:trunk' into trunk
skysiders authored Apr 27, 2022
2 parents 1e23bc2 + f187e9b commit ccf7e78
Showing 603 changed files with 44,125 additions and 6,337 deletions.
18 changes: 9 additions & 9 deletions LICENSE-binary
@@ -218,12 +218,12 @@ com.aliyun.oss:aliyun-sdk-oss:3.13.2
com.amazonaws:aws-java-sdk-bundle:1.11.901
com.cedarsoftware:java-util:1.9.0
com.cedarsoftware:json-io:2.5.1
com.fasterxml.jackson.core:jackson-annotations:2.13.0
com.fasterxml.jackson.core:jackson-core:2.13.0
com.fasterxml.jackson.core:jackson-databind:2.13.0
com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.13.0
com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.13.0
com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.13.0
com.fasterxml.jackson.core:jackson-annotations:2.13.2
com.fasterxml.jackson.core:jackson-core:2.13.2
com.fasterxml.jackson.core:jackson-databind:2.13.2.2
com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:2.13.2
com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:2.13.2
com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.13.2
com.fasterxml.uuid:java-uuid-generator:3.1.4
com.fasterxml.woodstox:woodstox-core:5.3.0
com.github.davidmoten:rxjava-extras:0.8.0.17
@@ -283,7 +283,7 @@ log4j:log4j:1.2.17
net.java.dev.jna:jna:5.2.0
net.minidev:accessors-smart:1.2
net.minidev:json-smart:2.4.7
org.apache.avro:avro:1.7.7
org.apache.avro:avro:1.9.2
org.apache.commons:commons-collections4:4.2
org.apache.commons:commons-compress:1.21
org.apache.commons:commons-configuration2:2.1.1
@@ -468,8 +468,8 @@ com.microsoft.azure:azure-cosmosdb-gateway:2.4.5
com.microsoft.azure:azure-data-lake-store-sdk:2.3.3
com.microsoft.azure:azure-keyvault-core:1.0.0
com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre7
org.bouncycastle:bcpkix-jdk15on:1.60
org.bouncycastle:bcprov-jdk15on:1.60
org.bouncycastle:bcpkix-jdk15on:1.68
org.bouncycastle:bcprov-jdk15on:1.68
org.checkerframework:checker-qual:2.5.2
org.codehaus.mojo:animal-sniffer-annotations:1.17
org.jruby.jcodings:jcodings:1.0.13
4 changes: 4 additions & 0 deletions dev-support/bin/create-release
@@ -535,6 +535,10 @@ function makearelease

big_console_header "Cleaning the Source Tree"

# Since CVE-2022-24765 in April 2022, git refuses to work in directories
# whose owner != the current user, unless explicitly told to trust it.
git config --global --add safe.directory /build/source

# git clean to clear any remnants from previous build
run "${GIT}" clean -xdf -e /patchprocess

7 changes: 7 additions & 0 deletions dev-support/docker/Dockerfile_centos_8
@@ -30,6 +30,13 @@ COPY pkg-resolver pkg-resolver
RUN chmod a+x pkg-resolver/*.sh pkg-resolver/*.py \
&& chmod a+r pkg-resolver/*.json

######
# CentOS 8 has reached its EOL and its packages
# are no longer available on the mirror.centos.org site.
# Please see https://www.centos.org/centos-linux-eol/
######
RUN pkg-resolver/set-vault-as-baseurl-centos.sh centos:8

######
# Install packages from yum
######
33 changes: 33 additions & 0 deletions dev-support/docker/pkg-resolver/set-vault-as-baseurl-centos.sh
@@ -0,0 +1,33 @@
#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

if [ $# -lt 1 ]; then
  echo "ERROR: Need at least 1 argument, $# were provided"
  exit 1
fi

if [ "$1" == "centos:7" ] || [ "$1" == "centos:8" ]; then
  cd /etc/yum.repos.d/ || exit &&
    sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* &&
    sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* &&
    yum update -y &&
    cd /root || exit
else
  echo "ERROR: Setting the archived baseurl is only supported for centos 7 and 8 environments"
  exit 1
fi
3 changes: 3 additions & 0 deletions hadoop-client-modules/hadoop-client-api/pom.xml
@@ -161,6 +161,9 @@
<!-- Exclude snappy-java -->
<exclude>org/xerial/snappy/*</exclude>
<exclude>org/xerial/snappy/**/*</exclude>
<!-- Exclude org.wildfly.openssl -->
<exclude>org/wildfly/openssl/*</exclude>
<exclude>org/wildfly/openssl/**/*</exclude>
</excludes>
</relocation>
<relocation>
@@ -56,7 +56,7 @@
<dependency>
<groupId>org.codehaus.mojo</groupId>
<artifactId>extra-enforcer-rules</artifactId>
<version>1.0-beta-3</version>
<version>1.5.1</version>
</dependency>
</dependencies>
<executions>
@@ -60,7 +60,7 @@
<dependency>
<groupId>org.codehaus.mojo</groupId>
<artifactId>extra-enforcer-rules</artifactId>
<version>1.0-beta-3</version>
<version>1.5.1</version>
</dependency>
</dependencies>
<executions>
6 changes: 6 additions & 0 deletions hadoop-client-modules/hadoop-client-integration-tests/pom.xml
@@ -184,6 +184,12 @@
<artifactId>hadoop-hdfs</artifactId>
<scope>test</scope>
<type>test-jar</type>
<exclusions>
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
22 changes: 18 additions & 4 deletions hadoop-client-modules/hadoop-client-minicluster/pom.xml
@@ -332,6 +332,10 @@
<groupId>org.apache.hadoop.thirdparty</groupId>
<artifactId>hadoop-shaded-guava</artifactId>
</exclusion>
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-commons</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Add optional runtime dependency on the in-development timeline server module
@@ -403,8 +407,8 @@
<!-- Skip commons-logging:commons-logging-api because it looks like nothing actually included it -->
<!-- Skip jetty-util because it's in client -->
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
<optional>true</optional>
<exclusions>
<exclusion>
@@ -451,9 +455,19 @@
<optional>true</optional>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-servlet</artifactId>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-servlet</artifactId>
<optional>true</optional>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.enterprise</groupId>
<artifactId>cdi-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- skip org.apache.avro:avro-ipc because it doesn't look like hadoop-common actually uses it -->
<dependency>
4 changes: 4 additions & 0 deletions hadoop-cloud-storage-project/hadoop-cloud-storage/pom.xml
@@ -101,6 +101,10 @@
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
<exclusion>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
12 changes: 8 additions & 4 deletions hadoop-cloud-storage-project/hadoop-huaweicloud/pom.xml
@@ -100,10 +100,14 @@
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<groupId>org.javassist</groupId>
<artifactId>javassist</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
16 changes: 16 additions & 0 deletions hadoop-common-project/hadoop-common/pom.xml
@@ -141,6 +141,16 @@
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-servlet</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>javax.enterprise</groupId>
<artifactId>cdi-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<!-- Used, even though 'mvn dependency:analyze' doesn't find it -->
@@ -205,6 +215,12 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-configuration2</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
63 changes: 39 additions & 24 deletions hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
@@ -251,30 +251,45 @@ log4j.appender.NMAUDIT.MaxBackupIndex=${nm.audit.log.maxbackupindex}
#log4j.appender.HSAUDIT.DatePattern=.yyyy-MM-dd

# Http Server Request Logs
#log4j.logger.http.requests.namenode=INFO,namenoderequestlog
#log4j.appender.namenoderequestlog=org.apache.hadoop.http.HttpRequestLogAppender
#log4j.appender.namenoderequestlog.Filename=${hadoop.log.dir}/jetty-namenode-yyyy_mm_dd.log
#log4j.appender.namenoderequestlog.RetainDays=3

#log4j.logger.http.requests.datanode=INFO,datanoderequestlog
#log4j.appender.datanoderequestlog=org.apache.hadoop.http.HttpRequestLogAppender
#log4j.appender.datanoderequestlog.Filename=${hadoop.log.dir}/jetty-datanode-yyyy_mm_dd.log
#log4j.appender.datanoderequestlog.RetainDays=3

#log4j.logger.http.requests.resourcemanager=INFO,resourcemanagerrequestlog
#log4j.appender.resourcemanagerrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
#log4j.appender.resourcemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-resourcemanager-yyyy_mm_dd.log
#log4j.appender.resourcemanagerrequestlog.RetainDays=3

#log4j.logger.http.requests.jobhistory=INFO,jobhistoryrequestlog
#log4j.appender.jobhistoryrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
#log4j.appender.jobhistoryrequestlog.Filename=${hadoop.log.dir}/jetty-jobhistory-yyyy_mm_dd.log
#log4j.appender.jobhistoryrequestlog.RetainDays=3

#log4j.logger.http.requests.nodemanager=INFO,nodemanagerrequestlog
#log4j.appender.nodemanagerrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
#log4j.appender.nodemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-nodemanager-yyyy_mm_dd.log
#log4j.appender.nodemanagerrequestlog.RetainDays=3
#log4j.appender.AccessNNDRFA=org.apache.log4j.DailyRollingFileAppender
#log4j.appender.AccessNNDRFA.File=${hadoop.log.dir}/jetty-namenode.log
#log4j.appender.AccessNNDRFA.DatePattern=.yyyy-MM-dd
#log4j.appender.AccessNNDRFA.layout=org.apache.log4j.PatternLayout
#log4j.appender.AccessNNDRFA.layout.ConversionPattern=%m%n

#log4j.logger.http.requests.namenode=INFO,AccessNNDRFA

#log4j.appender.AccessDNDRFA=org.apache.log4j.DailyRollingFileAppender
#log4j.appender.AccessDNDRFA.File=${hadoop.log.dir}/jetty-datanode.log
#log4j.appender.AccessDNDRFA.DatePattern=.yyyy-MM-dd
#log4j.appender.AccessDNDRFA.layout=org.apache.log4j.PatternLayout
#log4j.appender.AccessDNDRFA.layout.ConversionPattern=%m%n

#log4j.logger.http.requests.datanode=INFO,AccessDNDRFA

#log4j.appender.AccessRMDRFA=org.apache.log4j.DailyRollingFileAppender
#log4j.appender.AccessRMDRFA.File=${hadoop.log.dir}/jetty-resourcemanager.log
#log4j.appender.AccessRMDRFA.DatePattern=.yyyy-MM-dd
#log4j.appender.AccessRMDRFA.layout=org.apache.log4j.PatternLayout
#log4j.appender.AccessRMDRFA.layout.ConversionPattern=%m%n

#log4j.logger.http.requests.resourcemanager=INFO,AccessRMDRFA

#log4j.appender.AccessJHDRFA=org.apache.log4j.DailyRollingFileAppender
#log4j.appender.AccessJHDRFA.File=${hadoop.log.dir}/jetty-jobhistory.log
#log4j.appender.AccessJHDRFA.DatePattern=.yyyy-MM-dd
#log4j.appender.AccessJHDRFA.layout=org.apache.log4j.PatternLayout
#log4j.appender.AccessJHDRFA.layout.ConversionPattern=%m%n

#log4j.logger.http.requests.jobhistory=INFO,AccessJHDRFA

#log4j.appender.AccessNMDRFA=org.apache.log4j.DailyRollingFileAppender
#log4j.appender.AccessNMDRFA.File=${hadoop.log.dir}/jetty-nodemanager.log
#log4j.appender.AccessNMDRFA.DatePattern=.yyyy-MM-dd
#log4j.appender.AccessNMDRFA.layout=org.apache.log4j.PatternLayout
#log4j.appender.AccessNMDRFA.layout.ConversionPattern=%m%n

#log4j.logger.http.requests.nodemanager=INFO,AccessNMDRFA

# WebHdfs request log on datanodes
# Specify -Ddatanode.webhdfs.logger=INFO,HTTPDRFA on datanode startup to
@@ -25,6 +25,10 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY;
import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL;
import static org.apache.hadoop.util.functional.FutureIO.awaitFuture;

/** Adapts an {@link FSDataInputStream} to Avro's SeekableInput interface. */
@InterfaceAudience.Public
@InterfaceStability.Stable
@@ -42,7 +46,12 @@ public AvroFSInput(final FSDataInputStream in, final long len) {
public AvroFSInput(final FileContext fc, final Path p) throws IOException {
FileStatus status = fc.getFileStatus(p);
this.len = status.getLen();
this.stream = fc.open(p);
this.stream = awaitFuture(fc.openFile(p)
    .opt(FS_OPTION_OPENFILE_READ_POLICY,
        FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL)
    .withFileStatus(status)
    .build());
}

@Override
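For readers unfamiliar with the builder API used in the new AvroFSInput constructor: openFile() returns a builder whose opt() and withFileStatus() calls let the caller declare a read policy and hand back the FileStatus it already holds, so filesystems that need that status (object-store connectors, for example) can avoid a second metadata lookup; awaitFuture() then blocks for the resulting stream. A minimal sketch of the same pattern against a plain FileSystem follows; the path and configuration are placeholders, not part of this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY;
import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL;
import static org.apache.hadoop.util.functional.FutureIO.awaitFuture;

public class OpenFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path path = new Path("file:///tmp/example.avro");    // placeholder path
    FileSystem fs = FileSystem.get(path.toUri(), conf);

    // Fetch the status once and pass it to the builder so the filesystem
    // does not have to look it up again at open time.
    FileStatus status = fs.getFileStatus(path);
    try (FSDataInputStream in = awaitFuture(fs.openFile(path)
        .opt(FS_OPTION_OPENFILE_READ_POLICY,
            FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL)   // hint: whole-file sequential read
        .withFileStatus(status)
        .build())) {
      byte[] buffer = new byte[4096];
      System.out.println("first read returned " + in.read(buffer) + " bytes");
    }
  }
}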
@@ -19,6 +19,7 @@

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.classification.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -154,10 +155,20 @@ boolean running() {
/**
* How long in between runs of the background refresh.
*/
long getRefreshInterval() {
@VisibleForTesting
public long getRefreshInterval() {
return refreshInterval;
}

/**
* Randomize the refresh interval timing by this amount; the actual interval will be chosen
* uniformly between {@code interval-jitter} and {@code interval+jitter}.
*/
@VisibleForTesting
public long getJitter() {
return jitter;
}

/**
* Reset the current used data amount. This should be called
* when the cached value is re-computed.
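The getJitter() javadoc above promises a refresh delay drawn uniformly from [interval - jitter, interval + jitter]. Purely as an illustration of that contract (this is not the class's actual scheduling code), the next delay could be computed like so:

import java.util.concurrent.ThreadLocalRandom;

public class JitterSketch {
  /** Pick a delay uniformly from the closed range [intervalMs - jitterMs, intervalMs + jitterMs]. */
  static long nextDelay(long intervalMs, long jitterMs) {
    if (jitterMs <= 0) {
      return intervalMs;                                  // no randomization requested
    }
    // nextLong(origin, bound) is inclusive of origin and exclusive of bound.
    return intervalMs + ThreadLocalRandom.current().nextLong(-jitterMs, jitterMs + 1);
  }

  public static void main(String[] args) {
    // e.g. a 10 minute interval with +/- 1 minute of jitter
    System.out.println(nextDelay(600_000L, 60_000L));
  }
}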
@@ -24,7 +24,6 @@
import java.io.InputStream;
import java.nio.channels.ClosedChannelException;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.CompletableFuture;
@@ -45,6 +44,7 @@
import org.apache.hadoop.util.LambdaUtils;
import org.apache.hadoop.util.Progressable;

import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_STANDARD_OPTIONS;
import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
import static org.apache.hadoop.fs.impl.StoreImplementationUtils.isProbeForSyncable;

@@ -889,7 +889,7 @@ protected CompletableFuture<FSDataInputStream> openFileWithOptions(
final OpenFileParameters parameters) throws IOException {
AbstractFSBuilderImpl.rejectUnknownMandatoryKeys(
parameters.getMandatoryKeys(),
Collections.emptySet(),
FS_OPTION_OPENFILE_STANDARD_OPTIONS,
"for " + path);
return LambdaUtils.eval(
new CompletableFuture<>(),
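The hunk above changes which mandatory keys openFileWithOptions() will tolerate: instead of rejecting everything outside an empty set, it now accepts the standard openfile option names, so a caller that marks one of them as required with must() no longer fails against a filesystem that simply ignores the hint. Reusing the imports and the fs/path setup from the sketch shown after the AvroFSInput hunk, the caller-side difference is only opt() versus must():

// must(): the filesystem has to recognise the key or fail the open;
// with this change the standard openfile keys count as recognised,
// even on filesystems that take no action on them.
try (FSDataInputStream in = awaitFuture(fs.openFile(path)
    .must(FS_OPTION_OPENFILE_READ_POLICY,
        FS_OPTION_OPENFILE_READ_POLICY_SEQUENTIAL)
    .build())) {
  System.out.println("opened with a mandatory read policy");
}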