
Commit 298640f

Author: Anuj Modi (committed)
Backmerging with trunk
2 parents 820e2e0 + d107931, commit 298640f

File tree: 230 files changed, +9435 -1730 lines


LICENSE-binary

Lines changed: 2 additions & 3 deletions
@@ -216,7 +216,6 @@ com.aliyun:aliyun-java-sdk-kms:2.11.0
 com.aliyun:aliyun-java-sdk-ram:3.1.0
 com.aliyun:aliyun-java-sdk-sts:3.0.0
 com.aliyun.oss:aliyun-sdk-oss:3.13.2
-com.amazonaws:aws-java-sdk-bundle:1.12.599
 com.cedarsoftware:java-util:1.9.0
 com.cedarsoftware:json-io:2.5.1
 com.fasterxml.jackson.core:jackson-annotations:2.12.7

@@ -318,7 +317,7 @@ org.apache.htrace:htrace-core:3.1.0-incubating
 org.apache.htrace:htrace-core4:4.1.0-incubating
 org.apache.httpcomponents:httpclient:4.5.13
 org.apache.httpcomponents:httpcore:4.4.13
-org.apache.kafka:kafka-clients:2.8.2
+org.apache.kafka:kafka-clients:3.4.0
 org.apache.kerby:kerb-admin:2.0.3
 org.apache.kerby:kerb-client:2.0.3
 org.apache.kerby:kerb-common:2.0.3

@@ -378,7 +377,7 @@ hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/com
 hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/util/tree.h
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/compat/{fstatat|openat|unlinkat}.h

-com.github.luben:zstd-jni:1.4.9-1
+com.github.luben:zstd-jni:1.5.2-1
 dnsjava:dnsjava:2.1.7
 org.codehaus.woodstox:stax2-api:4.2.1

dev-support/bin/yetus-wrapper

Lines changed: 2 additions & 2 deletions
@@ -77,7 +77,7 @@ WANTED="$1"
 shift
 ARGV=("$@")

-HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.14.0}
+HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.14.1}
 BIN=$(yetus_abs "${BASH_SOURCE-$0}")
 BINDIR=$(dirname "${BIN}")

@@ -123,7 +123,7 @@ fi
 ## need to DL, etc
 ##

-BASEURL="https://archive.apache.org/dist/yetus/${HADOOP_YETUS_VERSION}/"
+BASEURL="https://downloads.apache.org/yetus/${HADOOP_YETUS_VERSION}/"
 TARBALL="${YETUS_PREFIX}-${HADOOP_YETUS_VERSION}-bin.tar"

 GPGBIN=$(command -v gpg)

dev-support/docker/Dockerfile_windows_10

Lines changed: 3 additions & 3 deletions
@@ -61,8 +61,8 @@ RUN powershell Invoke-WebRequest -URI https://cdn.azul.com/zulu/bin/zulu8.62.0.1
 RUN powershell Expand-Archive -Path $Env:TEMP\zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip -DestinationPath "C:\Java"

 # Install Apache Maven.
-RUN powershell Invoke-WebRequest -URI https://archive.apache.org/dist/maven/maven-3/3.8.6/binaries/apache-maven-3.8.6-bin.zip -OutFile $Env:TEMP\apache-maven-3.8.6-bin.zip
-RUN powershell Expand-Archive -Path $Env:TEMP\apache-maven-3.8.6-bin.zip -DestinationPath "C:\Maven"
+RUN powershell Invoke-WebRequest -URI https://downloads.apache.org/maven/maven-3/3.8.8/binaries/apache-maven-3.8.8-bin.zip -OutFile $Env:TEMP\apache-maven-3.8.8-bin.zip
+RUN powershell Expand-Archive -Path $Env:TEMP\apache-maven-3.8.8-bin.zip -DestinationPath "C:\Maven"

 # Install CMake 3.19.0.
 RUN powershell Invoke-WebRequest -URI https://cmake.org/files/v3.19/cmake-3.19.0-win64-x64.zip -OutFile $Env:TEMP\cmake-3.19.0-win64-x64.zip

@@ -135,7 +135,7 @@ ENV MAVEN_OPTS '-Xmx2048M -Xss128M'
 ENV IS_WINDOWS 1
 RUN setx PATH "%PATH%;%ALLUSERSPROFILE%\chocolatey\bin"
 RUN setx PATH "%PATH%;%JAVA_HOME%\bin"
-RUN setx PATH "%PATH%;C:\Maven\apache-maven-3.8.6\bin"
+RUN setx PATH "%PATH%;C:\Maven\apache-maven-3.8.8\bin"
 RUN setx PATH "%PATH%;C:\CMake\cmake-3.19.0-win64-x64\bin"
 RUN setx PATH "%PATH%;C:\ZStd"
 RUN setx PATH "%PATH%;C:\Program Files\Git\usr\bin"

hadoop-common-project/hadoop-auth/pom.xml

Lines changed: 5 additions & 1 deletion
@@ -136,7 +136,11 @@
     </dependency>
     <dependency>
       <groupId>org.apache.kerby</groupId>
-      <artifactId>kerb-simplekdc</artifactId>
+      <artifactId>kerb-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kerby</groupId>
+      <artifactId>kerb-util</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.directory.server</groupId>

hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh

Lines changed: 1 addition & 1 deletion
@@ -1911,7 +1911,7 @@ function hadoop_start_secure_daemon
   if [[ ! -f "${jsvc}" ]]; then
     hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
     hadoop_error "or privileged daemons. Please download and install jsvc from "
-    hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
+    hadoop_error "https://downloads.apache.org/commons/daemon/binaries/ "
     hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
     exit 1
   fi
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoUtils.java

Lines changed: 77 additions & 0 deletions

@@ -0,0 +1,77 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.crypto;

import java.security.Provider;
import java.security.Security;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.store.LogExactlyOnce;

/** Utility methods for the crypto related features. */
@InterfaceAudience.Private
public final class CryptoUtils {
  static final Logger LOG = LoggerFactory.getLogger(CryptoUtils.class);
  private static final LogExactlyOnce LOG_FAILED_TO_LOAD_CLASS = new LogExactlyOnce(LOG);
  private static final LogExactlyOnce LOG_FAILED_TO_ADD_PROVIDER = new LogExactlyOnce(LOG);

  private static final String BOUNCY_CASTLE_PROVIDER_CLASS
      = "org.bouncycastle.jce.provider.BouncyCastleProvider";
  static final String BOUNCY_CASTLE_PROVIDER_NAME = "BC";

  /**
   * Get the security provider value specified in
   * {@link CommonConfigurationKeysPublic#HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY}
   * from the given conf.
   *
   * @param conf the configuration
   * @return the configured provider, if there is any; otherwise, return an empty string.
   */
  public static String getJceProvider(Configuration conf) {
    final String provider = conf.getTrimmed(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY, "");
    final boolean autoAdd = conf.getBoolean(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_KEY,
        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_DEFAULT);

    // For backward compatibility, auto-add BOUNCY_CASTLE_PROVIDER_CLASS when the provider is "BC".
    if (autoAdd && BOUNCY_CASTLE_PROVIDER_NAME.equals(provider)) {
      try {
        // Use reflection in order to avoid statically loading the class.
        final Class<?> clazz = Class.forName(BOUNCY_CASTLE_PROVIDER_CLASS);
        Security.addProvider((Provider) clazz.getConstructor().newInstance());
        LOG.debug("Successfully added security provider {}", provider);
        if (LOG.isTraceEnabled()) {
          LOG.trace("Trace", new Throwable());
        }
      } catch (ClassNotFoundException e) {
        LOG_FAILED_TO_LOAD_CLASS.warn("Failed to load " + BOUNCY_CASTLE_PROVIDER_CLASS, e);
      } catch (Exception e) {
        LOG_FAILED_TO_ADD_PROVIDER.warn("Failed to add security provider for {}", provider, e);
      }
    }
    return provider;
  }

  private CryptoUtils() { }
}
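For context, a minimal caller-side sketch, not part of this commit: it wires the new helper into the SecureRandom construction pattern that JceCtrCryptoCodec (below) now uses. The class name JceProviderExample is hypothetical; the configuration constants are the ones the diff itself imports, and the fallback to new SecureRandom() is an assumption modelled on the codec's catch block. Note that CryptoUtils is @InterfaceAudience.Private, so code outside Hadoop would not normally call it.

import java.security.GeneralSecurityException;
import java.security.SecureRandom;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CryptoUtils;

import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_DEFAULT;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY;

/** Hypothetical caller, not part of this commit. */
public final class JceProviderExample {

  /** Build a SecureRandom honouring the configured JCE provider. */
  public static SecureRandom newSecureRandom(Configuration conf) {
    // Registers BouncyCastle reflectively when the configured provider is
    // "BC" and auto-add is enabled; returns "" when nothing is configured.
    final String provider = CryptoUtils.getJceProvider(conf);
    final String alg = conf.get(
        HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY,
        HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_DEFAULT);
    try {
      return provider.isEmpty()
          ? SecureRandom.getInstance(alg)
          : SecureRandom.getInstance(alg, provider);
    } catch (GeneralSecurityException e) {
      // Assumed fallback, modelled on JceCtrCryptoCodec's catch block.
      return new SecureRandom();
    }
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set(HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY, "BC");
    System.out.println(newSecureRandom(conf).getAlgorithm());
  }

  private JceProviderExample() { }
}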

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceCtrCryptoCodec.java

Lines changed: 3 additions & 12 deletions
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.crypto;

-import org.bouncycastle.jce.provider.BouncyCastleProvider;
 import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

@@ -27,13 +26,11 @@
 import java.nio.ByteBuffer;
 import java.security.GeneralSecurityException;
 import java.security.SecureRandom;
-import java.security.Security;
 import javax.crypto.Cipher;
 import javax.crypto.spec.IvParameterSpec;
 import javax.crypto.spec.SecretKeySpec;
 import org.slf4j.Logger;

-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_DEFAULT;

@@ -48,10 +45,6 @@ public String getProvider() {
     return provider;
   }

-  public void setProvider(String provider) {
-    this.provider = provider;
-  }
-
   public void calculateIV(byte[] initIV, long counter,
       byte[] iv, int blockSize) {
     Preconditions.checkArgument(initIV.length == blockSize);

@@ -82,17 +75,15 @@ public Configuration getConf() {

   public void setConf(Configuration conf) {
     this.conf = conf;
-    setProvider(conf.get(HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY));
-    if (BouncyCastleProvider.PROVIDER_NAME.equals(provider)) {
-      Security.addProvider(new BouncyCastleProvider());
-    }
+    this.provider = CryptoUtils.getJceProvider(conf);
+
     final String secureRandomAlg =
         conf.get(
             HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY,
             HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_DEFAULT);

     try {
-      random = (provider != null)
+      random = (provider != null && !provider.isEmpty())
           ? SecureRandom.getInstance(secureRandomAlg, provider)
           : SecureRandom.getInstance(secureRandomAlg);
     } catch(GeneralSecurityException e) {
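The strengthened guard matters because CryptoUtils.getJceProvider() returns an empty string, not null, when no provider is configured, and the two-argument SecureRandom.getInstance rejects null or empty provider names with IllegalArgumentException, a RuntimeException that the codec's catch (GeneralSecurityException e) block would not intercept. A hypothetical demonstration of the failure mode being avoided, not part of the commit:

import java.security.SecureRandom;

/** Hypothetical demonstration, not part of this commit. */
public final class EmptyProviderDemo {
  public static void main(String[] args) throws Exception {
    // What CryptoUtils.getJceProvider() returns when nothing is configured:
    String provider = "";
    try {
      // The two-argument overload rejects null/empty provider names.
      SecureRandom.getInstance("SHA1PRNG", provider);
    } catch (IllegalArgumentException e) {
      System.out.println("empty provider rejected: " + e.getMessage());
    }
  }

  private EmptyProviderDemo() { }
}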

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java

Lines changed: 2 additions & 7 deletions
@@ -26,7 +26,6 @@
 import java.io.OutputStreamWriter;
 import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
-import java.security.Security;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;

@@ -35,17 +34,16 @@
 import java.util.Map;
 import java.util.Objects;

-import org.bouncycastle.jce.provider.BouncyCastleProvider;
 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.CryptoUtils;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

 import javax.crypto.KeyGenerator;

-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCEKS_KEY_SERIALFILTER;

 /**

@@ -410,10 +408,7 @@ public KeyProvider(Configuration conf) {
           JCEKS_KEY_SERIALFILTER_DEFAULT);
       System.setProperty(JCEKS_KEY_SERIAL_FILTER, serialFilter);
     }
-    String jceProvider = conf.get(HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY);
-    if (BouncyCastleProvider.PROVIDER_NAME.equals(jceProvider)) {
-      Security.addProvider(new BouncyCastleProvider());
-    }
+    CryptoUtils.getJceProvider(conf);
   }

   /**
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/package-info.java

Lines changed: 20 additions & 0 deletions

@@ -0,0 +1,20 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/** Crypto related classes. */
package org.apache.hadoop.crypto;
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BulkDelete.java

Lines changed: 90 additions & 0 deletions

@@ -0,0 +1,90 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs;

import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.statistics.IOStatisticsSource;

import static java.util.Objects.requireNonNull;

/**
 * API for bulk deletion of objects/files,
 * <i>but not directories</i>.
 * After use, call {@code close()} to release any resources and
 * to guarantee store IOStatistics are updated.
 * <p>
 * Callers MUST have no expectation that parent directories will exist after the
 * operation completes; if an object store needs to explicitly look for and create
 * directory markers, that step will be omitted.
 * <p>
 * Be aware that on some stores (AWS S3) each object listed in a bulk delete counts
 * against the write IOPS limit; large page sizes are counterproductive here, as
 * are attempts at parallel submissions across multiple threads.
 * @see <a href="https://issues.apache.org/jira/browse/HADOOP-16823">HADOOP-16823.
 *      Large DeleteObject requests are their own Thundering Herd</a>
 */
@InterfaceAudience.Public
@InterfaceStability.Unstable
public interface BulkDelete extends IOStatisticsSource, Closeable {

  /**
   * The maximum number of objects/files to delete in a single request.
   * @return a number greater than zero.
   */
  int pageSize();

  /**
   * Base path of a bulk delete operation.
   * All paths submitted in {@link #bulkDelete(Collection)} must be under this path.
   * @return base path of a bulk delete operation.
   */
  Path basePath();

  /**
   * Delete a list of files/objects.
   * <ul>
   *   <li>Files must be under the path provided in {@link #basePath()}.</li>
   *   <li>The size of the list must be equal to or less than the page size
   *       declared in {@link #pageSize()}.</li>
   *   <li>Directories are not supported; the outcome of attempting to delete
   *       directories is undefined (ignored; undetected, listed as failures...).</li>
   *   <li>The operation is not atomic.</li>
   *   <li>The operation is treated as idempotent: network failures may
   *       trigger resubmission of the request; any new objects created under a
   *       path in the list may then be deleted.</li>
   *   <li>There is no guarantee that any parent directories exist after this call.</li>
   * </ul>
   * @param paths list of paths which must be absolute and under the base path
   *              provided in {@link #basePath()}.
   * @return a list of paths which failed to delete, with the exception message.
   * @throws IOException IO problems including networking, authentication and more.
   * @throws IllegalArgumentException if a path argument is invalid.
   */
  List<Map.Entry<Path, String>> bulkDelete(Collection<Path> paths)
      throws IOException, IllegalArgumentException;

}
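For context, a usage sketch, not part of this commit: the file above defines only the interface and leaves acquisition of an instance open, so the hypothetical helper below accepts a ready-made BulkDelete and pages a large list of paths through bulkDelete() under the documented pageSize()/basePath() contract.

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.fs.BulkDelete;
import org.apache.hadoop.fs.Path;

/** Hypothetical caller, not part of this commit. */
public final class BulkDeleteExample {

  /**
   * Delete the given files in pages, collecting per-path failures.
   * @param deleter an open BulkDelete, e.g. from a store-specific factory
   *                (assumed; no factory method appears in this diff).
   * @param paths absolute paths, all under deleter.basePath().
   */
  public static List<Map.Entry<Path, String>> deleteAll(
      BulkDelete deleter, List<Path> paths) throws IOException {
    List<Map.Entry<Path, String>> failures = new ArrayList<>();
    int page = deleter.pageSize();  // contract: greater than zero
    for (int i = 0; i < paths.size(); i += page) {
      // Each batch must not exceed the page size.
      Collection<Path> batch = paths.subList(i, Math.min(i + page, paths.size()));
      failures.addAll(deleter.bulkDelete(batch));
    }
    return failures;
  }

  private BulkDeleteExample() { }
}

Sequential, single-threaded paging is deliberate given the IOPS warning in the class javadoc; callers would normally hold the instance in try-with-resources so that close() publishes the IOStatistics.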
