Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
2a269f8
YARN-11675. Update MemoryResourceHandler implementation for cgroup v2…
p-szucs Apr 29, 2024
aa605ca
HADOOP-19159. S3A. Fix documentation of fs.s3a.committer.abort.pendin…
jshmchenxi Apr 29, 2024
41d0c62
HADOOP-19150: [ABFS] Fixing Test Code for ITestAbfsRestOperationExcep…
anujmodi2021 Apr 29, 2024
f32b6c3
HADOOP-19151. Support configurable SASL mechanism. (#6740)
szetszwo Apr 29, 2024
9154872
HDFS-17471. Correct the percentage of sample range. (#6742). Contribu…
fuchaohong Apr 30, 2024
37b8251
HDFS-17456. Fix the incorrect dfsused statistics of datanode when app…
fuchaohong Apr 30, 2024
25ae4c6
YARN-11685. Create a config to enable/disable cgroup v2 functionality…
p-szucs Apr 30, 2024
e8b2b89
HADOOP-19146. S3A: noaa-cors-pds test bucket access with global endpo…
virajjasani Apr 30, 2024
62ed382
CachedRecordStore should check if the record state is expired (#6783)
dannytbecker May 1, 2024
3d85185
HADOOP-19160. hadoop-auth should not depend on kerb-simplekdc (#6788)
adoroszlai May 3, 2024
610a3e8
HDFS-17500: Add missing operation name while authorizing some operati…
kulkabhay May 6, 2024
4da8a0a
Revert "HADOOP-18851: Performance improvement for DelegationTokenSecr…
ChenSammi May 7, 2024
fd63a7c
HDFS-17503. Unreleased volume references because of OOM. (#6782)
zhuzilong2013 May 10, 2024
7e1ee72
HDFS-17488. DN can fail IBRs with NPE when a volume is removed (#6759)
kokonguyen191 May 11, 2024
0b29588
Bump org.bouncycastle:bcprov-jdk18on in /hadoop-project (#6811)
dependabot[bot] May 12, 2024
07c08c0
Bump org.apache.derby:derby in /hadoop-project (#6816)
dependabot[bot] May 13, 2024
d323531
YARN-11689. Update the cgroup v2 init error handling (#6810)
brumi1024 May 13, 2024
1538f02
HDFS-17522. JournalNode web interfaces lack configs for X-FRAME-OPTIO…
hiwangzhihui May 13, 2024
1c139e0
MAPREDUCE-7474. Improve Manifest committer resilience (#6716)
steveloughran May 13, 2024
33179eb
HADOOP-19170. Fixes compilation issues on non-Linux systems (#6822)
zhengchenyu May 14, 2024
bf5e237
HDFS-17099. Fix Null Pointer Exception when stop namesystem in HDFS.(…
teamconfx May 14, 2024
1b310a6
HDFS-17514: RBF: Routers should unset cached stateID when namenode do…
simbadzina May 14, 2024
09625a4
HADOOP-19152. Do not hard code security providers. (#6739)
szetszwo May 14, 2024
1c26385
HADOOP-18958. Improve UserGroupInformation debug log. (#6255)
hiwangzhihui May 14, 2024
d9ef9ad
[HADOOP-18786] Use CDN instead of ASF archive (#5789)
ctubbsii May 14, 2024
2ab8f4d
HDFS-17520. [BugFix] TestDFSAdmin.testAllDatanodesReconfig and TestDF…
ZanderXu May 14, 2024
05af8d3
HADOOP-19073 WASB: Fix connection leak in FolderRenamePending (#6534)
xuzifu666 May 15, 2024
ff6081b
HADOOP-19172. S3A: upgrade AWS v1 sdk to 1.12.720 (#6823)
steveloughran May 15, 2024
9056572
YARN-11692. Support mixed cgroup v1/v2 controller structure (#6821)
p-szucs May 15, 2024
c78b367
HADOOP-19013. Adding x-amz-server-side-encryption-aws-kms-key-id in t…
mukund-thakur May 15, 2024
5b9c899
HADOOP-18851. Performance improvement for DelegationTokenSecretManage…
vikaskr22 May 16, 2024
36199b8
HADOOP-19167 Bug Fix: Change of Codec configuration does not work (#6…
skyskyhu May 17, 2024
6313208
HDFS-17509. RBF: Fix ClientProtocol.concat will throw NPE if tgr is a…
LiuGuH May 17, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion LICENSE-binary
Original file line number Diff line number Diff line change
Expand Up @@ -216,7 +216,6 @@ com.aliyun:aliyun-java-sdk-kms:2.11.0
com.aliyun:aliyun-java-sdk-ram:3.1.0
com.aliyun:aliyun-java-sdk-sts:3.0.0
com.aliyun.oss:aliyun-sdk-oss:3.13.2
com.amazonaws:aws-java-sdk-bundle:1.12.599
com.cedarsoftware:java-util:1.9.0
com.cedarsoftware:json-io:2.5.1
com.fasterxml.jackson.core:jackson-annotations:2.12.7
Expand Down
4 changes: 2 additions & 2 deletions dev-support/bin/yetus-wrapper
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ WANTED="$1"
shift
ARGV=("$@")

HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.14.0}
HADOOP_YETUS_VERSION=${HADOOP_YETUS_VERSION:-0.14.1}
BIN=$(yetus_abs "${BASH_SOURCE-$0}")
BINDIR=$(dirname "${BIN}")

Expand Down Expand Up @@ -123,7 +123,7 @@ fi
## need to DL, etc
##

BASEURL="https://archive.apache.org/dist/yetus/${HADOOP_YETUS_VERSION}/"
BASEURL="https://downloads.apache.org/yetus/${HADOOP_YETUS_VERSION}/"
TARBALL="${YETUS_PREFIX}-${HADOOP_YETUS_VERSION}-bin.tar"

GPGBIN=$(command -v gpg)
Expand Down
6 changes: 3 additions & 3 deletions dev-support/docker/Dockerfile_windows_10
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,8 @@ RUN powershell Invoke-WebRequest -URI https://cdn.azul.com/zulu/bin/zulu8.62.0.1
RUN powershell Expand-Archive -Path $Env:TEMP\zulu8.62.0.19-ca-jdk8.0.332-win_x64.zip -DestinationPath "C:\Java"

# Install Apache Maven.
RUN powershell Invoke-WebRequest -URI https://archive.apache.org/dist/maven/maven-3/3.8.6/binaries/apache-maven-3.8.6-bin.zip -OutFile $Env:TEMP\apache-maven-3.8.6-bin.zip
RUN powershell Expand-Archive -Path $Env:TEMP\apache-maven-3.8.6-bin.zip -DestinationPath "C:\Maven"
RUN powershell Invoke-WebRequest -URI https://downloads.apache.org/maven/maven-3/3.8.8/binaries/apache-maven-3.8.8-bin.zip -OutFile $Env:TEMP\apache-maven-3.8.8-bin.zip
RUN powershell Expand-Archive -Path $Env:TEMP\apache-maven-3.8.8-bin.zip -DestinationPath "C:\Maven"

# Install CMake 3.19.0.
RUN powershell Invoke-WebRequest -URI https://cmake.org/files/v3.19/cmake-3.19.0-win64-x64.zip -OutFile $Env:TEMP\cmake-3.19.0-win64-x64.zip
Expand Down Expand Up @@ -135,7 +135,7 @@ ENV MAVEN_OPTS '-Xmx2048M -Xss128M'
ENV IS_WINDOWS 1
RUN setx PATH "%PATH%;%ALLUSERSPROFILE%\chocolatey\bin"
RUN setx PATH "%PATH%;%JAVA_HOME%\bin"
RUN setx PATH "%PATH%;C:\Maven\apache-maven-3.8.6\bin"
RUN setx PATH "%PATH%;C:\Maven\apache-maven-3.8.8\bin"
RUN setx PATH "%PATH%;C:\CMake\cmake-3.19.0-win64-x64\bin"
RUN setx PATH "%PATH%;C:\ZStd"
RUN setx PATH "%PATH%;C:\Program Files\Git\usr\bin"
Expand Down
6 changes: 5 additions & 1 deletion hadoop-common-project/hadoop-auth/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,11 @@
</dependency>
<dependency>
<groupId>org.apache.kerby</groupId>
<artifactId>kerb-simplekdc</artifactId>
<artifactId>kerb-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kerby</groupId>
<artifactId>kerb-util</artifactId>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1911,7 +1911,7 @@ function hadoop_start_secure_daemon
if [[ ! -f "${jsvc}" ]]; then
hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
hadoop_error "or privileged daemons. Please download and install jsvc from "
hadoop_error "http://archive.apache.org/dist/commons/daemon/binaries/ "
hadoop_error "https://downloads.apache.org/commons/daemon/binaries/ "
hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
exit 1
fi
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.crypto;

import java.security.Provider;
import java.security.Security;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.store.LogExactlyOnce;

/**
 * Utility methods for the crypto related features.
 * Stateless; all members are static and the class cannot be instantiated.
 */
@InterfaceAudience.Private
public final class CryptoUtils {
  static final Logger LOG = LoggerFactory.getLogger(CryptoUtils.class);
  // Warn-once guards so repeated lookups do not flood the log.
  private static final LogExactlyOnce LOG_FAILED_TO_LOAD_CLASS = new LogExactlyOnce(LOG);
  private static final LogExactlyOnce LOG_FAILED_TO_ADD_PROVIDER = new LogExactlyOnce(LOG);

  private static final String BOUNCY_CASTLE_PROVIDER_CLASS
      = "org.bouncycastle.jce.provider.BouncyCastleProvider";
  static final String BOUNCY_CASTLE_PROVIDER_NAME = "BC";

  /**
   * Get the security provider value specified in
   * {@link CommonConfigurationKeysPublic#HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY}
   * from the given conf.
   *
   * @param conf the configuration
   * @return the configured provider, if there is any; otherwise, return an empty string.
   */
  public static String getJceProvider(Configuration conf) {
    final String configured = conf.getTrimmed(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY, "");
    final boolean autoAddEnabled = conf.getBoolean(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_KEY,
        CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_DEFAULT);

    // For backward compatible, auto-add BOUNCY_CASTLE_PROVIDER_CLASS when the provider is "BC".
    if (autoAddEnabled && BOUNCY_CASTLE_PROVIDER_NAME.equals(configured)) {
      registerBouncyCastle(configured);
    }
    return configured;
  }

  /**
   * Reflectively load and register the Bouncy Castle provider so that the
   * class is not statically linked into hadoop-common.
   *
   * @param providerName the configured provider name (used only for logging)
   */
  private static void registerBouncyCastle(String providerName) {
    try {
      // Use reflection in order to avoid statically loading the class.
      final Class<?> providerClass = Class.forName(BOUNCY_CASTLE_PROVIDER_CLASS);
      Security.addProvider((Provider) providerClass.getConstructor().newInstance());
      LOG.debug("Successfully added security provider {}", providerName);
      if (LOG.isTraceEnabled()) {
        LOG.trace("Trace", new Throwable());
      }
    } catch (ClassNotFoundException e) {
      LOG_FAILED_TO_LOAD_CLASS.warn("Failed to load " + BOUNCY_CASTLE_PROVIDER_CLASS, e);
    } catch (Exception e) {
      LOG_FAILED_TO_ADD_PROVIDER.warn("Failed to add security provider for {}", providerName, e);
    }
  }

  private CryptoUtils() { }
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@
*/
package org.apache.hadoop.crypto;

import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
Expand All @@ -27,13 +26,11 @@
import java.nio.ByteBuffer;
import java.security.GeneralSecurityException;
import java.security.SecureRandom;
import java.security.Security;
import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.slf4j.Logger;

import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_DEFAULT;

Expand All @@ -48,10 +45,6 @@ public String getProvider() {
return provider;
}

public void setProvider(String provider) {
this.provider = provider;
}

public void calculateIV(byte[] initIV, long counter,
byte[] iv, int blockSize) {
Preconditions.checkArgument(initIV.length == blockSize);
Expand Down Expand Up @@ -82,17 +75,15 @@ public Configuration getConf() {

public void setConf(Configuration conf) {
this.conf = conf;
setProvider(conf.get(HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY));
if (BouncyCastleProvider.PROVIDER_NAME.equals(provider)) {
Security.addProvider(new BouncyCastleProvider());
}
this.provider = CryptoUtils.getJceProvider(conf);

final String secureRandomAlg =
conf.get(
HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY,
HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_DEFAULT);

try {
random = (provider != null)
random = (provider != null && !provider.isEmpty())
? SecureRandom.getInstance(secureRandomAlg, provider)
: SecureRandom.getInstance(secureRandomAlg);
} catch(GeneralSecurityException e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.security.Security;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
Expand All @@ -35,17 +34,16 @@
import java.util.Map;
import java.util.Objects;

import org.bouncycastle.jce.provider.BouncyCastleProvider;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CryptoUtils;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

import javax.crypto.KeyGenerator;

import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCEKS_KEY_SERIALFILTER;

/**
Expand Down Expand Up @@ -410,10 +408,7 @@ public KeyProvider(Configuration conf) {
JCEKS_KEY_SERIALFILTER_DEFAULT);
System.setProperty(JCEKS_KEY_SERIAL_FILTER, serialFilter);
}
String jceProvider = conf.get(HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY);
if (BouncyCastleProvider.PROVIDER_NAME.equals(jceProvider)) {
Security.addProvider(new BouncyCastleProvider());
}
CryptoUtils.getJceProvider(conf);
}

/**
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/** Crypto related classes. */
package org.apache.hadoop.crypto;
Original file line number Diff line number Diff line change
Expand Up @@ -773,6 +773,9 @@ public class CommonConfigurationKeysPublic {
*/
public static final String HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY =
"hadoop.security.crypto.jce.provider";
public static final String HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_KEY =
"hadoop.security.crypto.jce.provider.auto-add";
public static final boolean HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_AUTO_ADD_DEFAULT = true;
/**
* @see
* <a href="{@docRoot}/../hadoop-project-dist/hadoop-common/core-default.xml">
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

Expand Down Expand Up @@ -152,6 +153,9 @@ public static Compressor getCompressor(CompressionCodec codec, Configuration con
compressor = codec.createCompressor();
LOG.info("Got brand-new compressor ["+codec.getDefaultExtension()+"]");
} else {
if (conf == null && codec instanceof Configurable) {
conf = ((Configurable)codec).getConf();
}
compressor.reinit(conf);
if(LOG.isDebugEnabled()) {
LOG.debug("Got recycled compressor");
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * SASL related constants.
 *
 * <p>The effective SASL mechanism is resolved once at class-load time from the
 * {@code HADOOP_SASL_MECHANISM} environment variable, defaulting to
 * {@code DIGEST-MD5} when the variable is unset.
 */
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public class SaslConstants {
  public static final Logger LOG = LoggerFactory.getLogger(SaslConstants.class);

  private static final String SASL_MECHANISM_ENV = "HADOOP_SASL_MECHANISM";
  public static final String SASL_MECHANISM;
  private static final String SASL_MECHANISM_DEFAULT = "DIGEST-MD5";

  static {
    SASL_MECHANISM = resolveMechanism();
  }

  /**
   * Read the mechanism from the environment, falling back to the default.
   *
   * @return the SASL mechanism to use for this process
   */
  private static String resolveMechanism() {
    final String fromEnv = System.getenv(SASL_MECHANISM_ENV);
    LOG.debug("{} = {} (env)", SASL_MECHANISM_ENV, fromEnv);
    final String effective = fromEnv == null ? SASL_MECHANISM_DEFAULT : fromEnv;
    LOG.debug("{} = {} (effective)", SASL_MECHANISM_ENV, effective);
    return effective;
  }

  private SaslConstants() {}
}
Original file line number Diff line number Diff line change
Expand Up @@ -223,8 +223,8 @@ public enum AuthMethod {
SIMPLE((byte) 80, ""),
KERBEROS((byte) 81, "GSSAPI"),
@Deprecated
DIGEST((byte) 82, "DIGEST-MD5"),
TOKEN((byte) 82, "DIGEST-MD5"),
DIGEST((byte) 82, SaslConstants.SASL_MECHANISM),
TOKEN((byte) 82, SaslConstants.SASL_MECHANISM),
PLAIN((byte) 83, "PLAIN");

/** The code for this method. */
Expand Down Expand Up @@ -273,7 +273,7 @@ public void write(DataOutput out) throws IOException {
}
};

/** CallbackHandler for SASL DIGEST-MD5 mechanism */
/** CallbackHandler for SASL mechanism. */
@InterfaceStability.Evolving
public static class SaslDigestCallbackHandler implements CallbackHandler {
private SecretManager<TokenIdentifier> secretManager;
Expand Down Expand Up @@ -309,7 +309,7 @@ public void handle(Callback[] callbacks) throws InvalidToken,
continue; // realm is ignored
} else {
throw new UnsupportedCallbackException(callback,
"Unrecognized SASL DIGEST-MD5 Callback");
"Unrecognized SASL Callback");
}
}
if (pc != null) {
Expand All @@ -319,11 +319,8 @@ public void handle(Callback[] callbacks) throws InvalidToken,
UserGroupInformation user = null;
user = tokenIdentifier.getUser(); // may throw exception
connection.attemptingUser = user;

if (LOG.isDebugEnabled()) {
LOG.debug("SASL server DIGEST-MD5 callback: setting password "
+ "for client: " + tokenIdentifier.getUser());
}

LOG.debug("SASL server callback: setting password for client: {}", user);
pc.setPassword(password);
}
if (ac != null) {
Expand All @@ -339,8 +336,7 @@ public void handle(Callback[] callbacks) throws InvalidToken,
UserGroupInformation logUser =
getIdentifier(authzid, secretManager).getUser();
String username = logUser == null ? null : logUser.getUserName();
LOG.debug("SASL server DIGEST-MD5 callback: setting "
+ "canonicalized client ID: " + username);
LOG.debug("SASL server callback: setting authorizedID: {}", username);
}
ac.setAuthorizedID(authzid);
}
Expand Down
Loading