Skip to content

Commit 30bf42d

Browse files
committed
Merge branch 'trunk' of github.com:apache/hadoop into HADOOP-19218-trunk-2
2 parents 69ebdf6 + b60497f commit 30bf42d

File tree

62 files changed

+4100
-464
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

62 files changed

+4100
-464
lines changed
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
/**
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.fs;
19+
20+
import org.apache.hadoop.classification.InterfaceAudience;
21+
import org.apache.hadoop.classification.InterfaceStability;
22+
23+
/**
24+
* Exception to denote if the underlying stream, cache or other closable resource
25+
* is closed.
26+
*/
27+
@InterfaceAudience.Public
28+
@InterfaceStability.Unstable
29+
public class ClosedIOException extends PathIOException {
30+
31+
/**
32+
* Appends the custom error-message to the default error message.
33+
* @param path path that encountered the closed resource.
34+
* @param message custom error message.
35+
*/
36+
public ClosedIOException(String path, String message) {
37+
super(path, message);
38+
}
39+
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -183,7 +183,7 @@ public final Map<String, String> getFencingParameters() {
183183
* expose to fencing implementations/scripts. Fencing methods are free
184184
* to use this map as they see fit -- notably, the shell script
185185
* implementation takes each entry, prepends 'target_', substitutes
186-
* '_' for '.', and adds it to the environment of the script.
186+
* '_' for '.' and '-', and adds it to the environment of the script.
187187
*
188188
* Subclass implementations should be sure to delegate to the superclass
189189
* implementation as well as adding their own keys.

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ShellCommandFencer.java

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -39,8 +39,8 @@
3939
* (cmd.exe on Windows) and may not include any closing parentheses.<p>
4040
*
4141
* The shell command will be run with an environment set up to contain
42-
* all of the current Hadoop configuration variables, with the '_' character
43-
* replacing any '.' characters in the configuration keys.<p>
42+
* all of the current Hadoop configuration variables, with the '_' character
43+
* replacing any '.' or '-' characters in the configuration keys.<p>
4444
*
4545
* If the shell command returns an exit code of 0, the fencing is
4646
* determined to be successful. If it returns any other exit code, the
@@ -202,11 +202,11 @@ private static String tryGetPid(Process p) {
202202

203203
/**
204204
* Set the environment of the subprocess to be the Configuration,
205-
* with '.'s replaced by '_'s.
205+
* with '.'s and '-'s replaced by '_'s.
206206
*/
207207
private void setConfAsEnvVars(Map<String, String> env) {
208208
for (Map.Entry<String, String> pair : getConf()) {
209-
env.put(pair.getKey().replace('.', '_'), pair.getValue());
209+
env.put(pair.getKey().replaceAll("[.-]", "_"), pair.getValue());
210210
}
211211
}
212212

@@ -237,7 +237,7 @@ private void addTargetInfoAsEnvVars(HAServiceTarget target,
237237
for (Map.Entry<String, String> e :
238238
target.getFencingParameters().entrySet()) {
239239
String key = prefix + e.getKey();
240-
key = key.replace('.', '_');
240+
key = key.replaceAll("[.-]", "_");
241241
environment.put(key, e.getValue());
242242
}
243243
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -106,6 +106,7 @@
106106
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto;
107107
import org.apache.hadoop.net.NetUtils;
108108
import org.apache.hadoop.security.AccessControlException;
109+
import org.apache.hadoop.security.SaslConstants;
109110
import org.apache.hadoop.security.SaslPropertiesResolver;
110111
import org.apache.hadoop.security.SaslRpcServer;
111112
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -2604,7 +2605,8 @@ private RpcSaslProto buildSaslNegotiateResponse()
26042605
RpcSaslProto negotiateMessage = negotiateResponse;
26052606
// accelerate token negotiation by sending initial challenge
26062607
// in the negotiation response
2607-
if (enabledAuthMethods.contains(AuthMethod.TOKEN)) {
2608+
if (enabledAuthMethods.contains(AuthMethod.TOKEN)
2609+
&& SaslConstants.SASL_MECHANISM_DEFAULT.equals(AuthMethod.TOKEN.getMechanismName())) {
26082610
saslServer = createSaslServer(AuthMethod.TOKEN);
26092611
byte[] challenge = saslServer.evaluateResponse(new byte[0]);
26102612
RpcSaslProto.Builder negotiateBuilder =

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslConstants.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ public class SaslConstants {
3232

3333
private static final String SASL_MECHANISM_ENV = "HADOOP_SASL_MECHANISM";
3434
public static final String SASL_MECHANISM;
35-
private static final String SASL_MECHANISM_DEFAULT = "DIGEST-MD5";
35+
public static final String SASL_MECHANISM_DEFAULT = "DIGEST-MD5";
3636

3737
static {
3838
final String mechanism = System.getenv(SASL_MECHANISM_ENV);

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@
3939
import javax.security.auth.callback.PasswordCallback;
4040
import javax.security.auth.callback.UnsupportedCallbackException;
4141
import javax.security.auth.kerberos.KerberosPrincipal;
42+
import javax.security.sasl.AuthorizeCallback;
4243
import javax.security.sasl.RealmCallback;
4344
import javax.security.sasl.RealmChoiceCallback;
4445
import javax.security.sasl.Sasl;
@@ -681,9 +682,17 @@ public void handle(Callback[] callbacks)
681682
pc = (PasswordCallback) callback;
682683
} else if (callback instanceof RealmCallback) {
683684
rc = (RealmCallback) callback;
685+
} else if (callback instanceof AuthorizeCallback) {
686+
final AuthorizeCallback ac = (AuthorizeCallback) callback;
687+
final String authId = ac.getAuthenticationID();
688+
final String authzId = ac.getAuthorizationID();
689+
ac.setAuthorized(authId.equals(authzId));
690+
if (ac.isAuthorized()) {
691+
ac.setAuthorizedID(authzId);
692+
}
684693
} else {
685694
throw new UnsupportedCallbackException(callback,
686-
"Unrecognized SASL client callback");
695+
"Unrecognized SASL client callback " + callback.getClass());
687696
}
688697
}
689698
if (nc != null) {

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestShellCommandFencer.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ public void resetLogSpy() {
6363

6464
private static ShellCommandFencer createFencer() {
6565
Configuration conf = new Configuration();
66-
conf.set("in.fencing.tests", "yessir");
66+
conf.set("in.fencing-tests", "yessir");
6767
ShellCommandFencer fencer = new ShellCommandFencer();
6868
fencer.setConf(conf);
6969
return fencer;

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -236,6 +236,9 @@ public interface HdfsClientConfigKeys {
236236
String DFS_DATA_TRANSFER_SASL_PROPS_RESOLVER_CLASS_KEY =
237237
"dfs.data.transfer.saslproperties.resolver.class";
238238

239+
String DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY
240+
= "dfs.data.transfer.sasl.CustomizedCallbackHandler.class";
241+
239242
String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY =
240243
"dfs.encrypt.data.transfer.cipher.key.bitlength";
241244
int DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_DEFAULT = 128;

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -102,9 +102,9 @@ public static void checkSaslComplete(SaslParticipant sasl,
102102
Set<String> requestedQop = ImmutableSet.copyOf(Arrays.asList(
103103
saslProps.get(Sasl.QOP).split(",")));
104104
String negotiatedQop = sasl.getNegotiatedQop();
105-
LOG.debug("Verifying QOP, requested QOP = {}, negotiated QOP = {}",
106-
requestedQop, negotiatedQop);
107-
if (!requestedQop.contains(negotiatedQop)) {
105+
LOG.debug("{}: Verifying QOP: requested = {}, negotiated = {}",
106+
sasl, requestedQop, negotiatedQop);
107+
if (negotiatedQop != null && !requestedQop.contains(negotiatedQop)) {
108108
throw new IOException(String.format("SASL handshake completed, but " +
109109
"channel does not have acceptable quality of protection, " +
110110
"requested = %s, negotiated = %s", requestedQop, negotiatedQop));
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.hdfs.protocol.datatransfer.sasl;
19+
20+
import javax.security.auth.callback.Callback;
21+
import javax.security.auth.callback.UnsupportedCallbackException;
22+
import java.io.IOException;
23+
import java.util.List;
24+
25+
/**
 * For handling customized {@link Callback} instances that the standard
 * SASL callback handlers do not recognize.
 */
public interface CustomizedCallbackHandler {

  /**
   * Default implementation: no customized callbacks are supported,
   * so any non-empty callback list is rejected.
   */
  class DefaultHandler implements CustomizedCallbackHandler {
    @Override
    public void handleCallback(List<Callback> callbacks, String name, char[] password)
        throws UnsupportedCallbackException {
      if (!callbacks.isEmpty()) {
        // Report the first unsupported callback to the caller.
        throw new UnsupportedCallbackException(callbacks.get(0));
      }
    }
  }

  /**
   * Handle the given customized callbacks.
   *
   * @param callbacks the callbacks to handle.
   * @param name the user name associated with the SASL exchange.
   * @param password the password associated with the SASL exchange.
   * @throws UnsupportedCallbackException if a callback is not supported.
   * @throws IOException if an I/O error occurs while handling a callback.
   */
  void handleCallback(List<Callback> callbacks, String name, char[] password)
      throws UnsupportedCallbackException, IOException;
}

0 commit comments

Comments
 (0)