Skip to content

Commit a5eb5e9

Browse files
authored
HDFS-17576. Support user defined auth Callback. (#6945)
1 parent 9dad697 commit a5eb5e9

File tree

6 files changed

+145
-11
lines changed

6 files changed

+145
-11
lines changed

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -236,6 +236,9 @@ public interface HdfsClientConfigKeys {
236236
String DFS_DATA_TRANSFER_SASL_PROPS_RESOLVER_CLASS_KEY =
237237
"dfs.data.transfer.saslproperties.resolver.class";
238238

239+
String DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY
240+
= "dfs.data.transfer.sasl.CustomizedCallbackHandler.class";
241+
239242
String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY =
240243
"dfs.encrypt.data.transfer.cipher.key.bitlength";
241244
int DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_DEFAULT = 128;

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -102,9 +102,9 @@ public static void checkSaslComplete(SaslParticipant sasl,
102102
Set<String> requestedQop = ImmutableSet.copyOf(Arrays.asList(
103103
saslProps.get(Sasl.QOP).split(",")));
104104
String negotiatedQop = sasl.getNegotiatedQop();
105-
LOG.debug("Verifying QOP, requested QOP = {}, negotiated QOP = {}",
106-
requestedQop, negotiatedQop);
107-
if (!requestedQop.contains(negotiatedQop)) {
105+
LOG.debug("{}: Verifying QOP: requested = {}, negotiated = {}",
106+
sasl, requestedQop, negotiatedQop);
107+
if (negotiatedQop != null && !requestedQop.contains(negotiatedQop)) {
108108
throw new IOException(String.format("SASL handshake completed, but " +
109109
"channel does not have acceptable quality of protection, " +
110110
"requested = %s, negotiated = %s", requestedQop, negotiatedQop));
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.hdfs.protocol.datatransfer.sasl;
19+
20+
import javax.security.auth.callback.Callback;
21+
import javax.security.auth.callback.UnsupportedCallbackException;
22+
import java.io.IOException;
23+
import java.util.List;
24+
25+
/**
 * For handling customized {@link Callback}s.
 * <p>
 * A security provider may define {@link Callback} subclasses that the built-in
 * SASL server callback handler does not recognize.  An implementation of this
 * interface, configured via
 * {@code dfs.data.transfer.sasl.CustomizedCallbackHandler.class}, is given the
 * unrecognized callbacks to process.  Implementations are instantiated
 * reflectively, so they must provide a public no-argument constructor.
 */
public interface CustomizedCallbackHandler {

  /**
   * Handle the callbacks that were not recognized by the built-in handler.
   *
   * @param callbacks the unrecognized callbacks.
   * @param name the user name, or {@code null} if it is unavailable.
   * @param password the user's password, or {@code null} if it is unavailable.
   * @throws UnsupportedCallbackException if a callback cannot be handled.
   * @throws IOException if an I/O error occurs while handling a callback.
   */
  void handleCallback(List<Callback> callbacks, String name, char[] password)
      throws UnsupportedCallbackException, IOException;

  /** The default implementation: it recognizes no callbacks at all. */
  class DefaultHandler implements CustomizedCallbackHandler {
    @Override
    public void handleCallback(List<Callback> callbacks, String name, char[] password)
        throws UnsupportedCallbackException {
      if (!callbacks.isEmpty()) {
        // Report the first unrecognized callback, matching the behavior
        // before customized handlers were supported.
        throw new UnsupportedCallbackException(callbacks.get(0));
      }
    }
  }
}

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java

Lines changed: 28 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@
2929
import java.io.InputStream;
3030
import java.io.OutputStream;
3131
import java.nio.charset.StandardCharsets;
32+
import java.util.ArrayList;
3233
import java.util.List;
3334
import java.util.Map;
3435

@@ -46,6 +47,7 @@
4647
import org.apache.hadoop.classification.InterfaceAudience;
4748
import org.apache.hadoop.conf.Configuration;
4849
import org.apache.hadoop.crypto.CipherOption;
50+
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
4951
import org.apache.hadoop.hdfs.net.Peer;
5052
import org.apache.hadoop.hdfs.protocol.DatanodeID;
5153
import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
@@ -178,7 +180,7 @@ private IOStreamPair getEncryptedStreams(Peer peer,
178180
dnConf.getEncryptionAlgorithm());
179181
}
180182

181-
CallbackHandler callbackHandler = new SaslServerCallbackHandler(
183+
final CallbackHandler callbackHandler = new SaslServerCallbackHandler(dnConf.getConf(),
182184
new PasswordFunction() {
183185
@Override
184186
public char[] apply(String userName) throws IOException {
@@ -195,7 +197,7 @@ public char[] apply(String userName) throws IOException {
195197
* logic. It's similar to a Guava Function, but we need to let it throw
196198
* exceptions.
197199
*/
198-
private interface PasswordFunction {
200+
interface PasswordFunction {
199201

200202
/**
201203
* Returns the SASL password for the given user name.
@@ -210,18 +212,27 @@ private interface PasswordFunction {
210212
/**
211213
* Sets user name and password when asked by the server-side SASL object.
212214
*/
213-
private static final class SaslServerCallbackHandler
215+
static final class SaslServerCallbackHandler
214216
implements CallbackHandler {
215-
216217
private final PasswordFunction passwordFunction;
218+
private final CustomizedCallbackHandler customizedCallbackHandler;
217219

218220
/**
219221
* Creates a new SaslServerCallbackHandler.
220222
*
221223
* @param passwordFunction for determining the user's password
222224
*/
223-
public SaslServerCallbackHandler(PasswordFunction passwordFunction) {
225+
SaslServerCallbackHandler(Configuration conf, PasswordFunction passwordFunction) {
224226
this.passwordFunction = passwordFunction;
227+
228+
final Class<? extends CustomizedCallbackHandler> clazz = conf.getClass(
229+
HdfsClientConfigKeys.DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY,
230+
CustomizedCallbackHandler.DefaultHandler.class, CustomizedCallbackHandler.class);
231+
try {
232+
this.customizedCallbackHandler = clazz.newInstance();
233+
} catch (Exception e) {
234+
throw new IllegalStateException("Failed to create a new instance of " + clazz, e);
235+
}
225236
}
226237

227238
@Override
@@ -230,6 +241,7 @@ public void handle(Callback[] callbacks) throws IOException,
230241
NameCallback nc = null;
231242
PasswordCallback pc = null;
232243
AuthorizeCallback ac = null;
244+
List<Callback> unknownCallbacks = null;
233245
for (Callback callback : callbacks) {
234246
if (callback instanceof AuthorizeCallback) {
235247
ac = (AuthorizeCallback) callback;
@@ -240,8 +252,10 @@ public void handle(Callback[] callbacks) throws IOException,
240252
} else if (callback instanceof RealmCallback) {
241253
continue; // realm is ignored
242254
} else {
243-
throw new UnsupportedCallbackException(callback,
244-
"Unrecognized SASL Callback: " + callback);
255+
if (unknownCallbacks == null) {
256+
unknownCallbacks = new ArrayList<>();
257+
}
258+
unknownCallbacks.add(callback);
245259
}
246260
}
247261

@@ -253,6 +267,12 @@ public void handle(Callback[] callbacks) throws IOException,
253267
ac.setAuthorized(true);
254268
ac.setAuthorizedID(ac.getAuthorizationID());
255269
}
270+
271+
if (unknownCallbacks != null) {
272+
final String name = nc != null ? nc.getDefaultName() : null;
273+
final char[] password = name != null ? passwordFunction.apply(name) : null;
274+
customizedCallbackHandler.handleCallback(unknownCallbacks, name, password);
275+
}
256276
}
257277
}
258278

@@ -298,7 +318,7 @@ private IOStreamPair getSaslStreams(Peer peer, OutputStream underlyingOut,
298318
Map<String, String> saslProps = saslPropsResolver.getServerProperties(
299319
getPeerAddress(peer));
300320

301-
CallbackHandler callbackHandler = new SaslServerCallbackHandler(
321+
final CallbackHandler callbackHandler = new SaslServerCallbackHandler(dnConf.getConf(),
302322
new PasswordFunction() {
303323
@Override
304324
public char[] apply(String userName) throws IOException {

hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2641,6 +2641,15 @@
26412641
</description>
26422642
</property>
26432643

2644+
<property>
2645+
<name>dfs.data.transfer.sasl.CustomizedCallbackHandler.class</name>
2646+
<value></value>
2647+
<description>
2648+
A security provider may define new javax.security.auth.callback.Callback classes.
2649+
This property allows users to configure a customized callback handler.
2650+
</description>
2651+
</property>
2652+
26442653
<property>
26452654
<name>dfs.journalnode.rpc-address</name>
26462655
<value>0.0.0.0:8485</value>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,63 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.hdfs.protocol.datatransfer.sasl;
19+
20+
import org.apache.hadoop.conf.Configuration;
21+
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
22+
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferServer.SaslServerCallbackHandler;
23+
import org.junit.Assert;
24+
import org.junit.Test;
25+
import org.slf4j.Logger;
26+
import org.slf4j.LoggerFactory;
27+
28+
import javax.security.auth.callback.Callback;
29+
import javax.security.auth.callback.UnsupportedCallbackException;
30+
import java.util.Arrays;
31+
import java.util.List;
32+
33+
public class TestCustomizedCallbackHandler {
34+
public static final Logger LOG = LoggerFactory.getLogger(TestCustomizedCallbackHandler.class);
35+
36+
static class MyCallback implements Callback { }
37+
38+
static class MyCallbackHandler implements CustomizedCallbackHandler {
39+
@Override
40+
public void handleCallback(List<Callback> callbacks, String name, char[] password) {
41+
LOG.info("{}: handling {} for {}", getClass().getSimpleName(), callbacks, name);
42+
}
43+
}
44+
45+
@Test
46+
public void testCustomizedCallbackHandler() throws Exception {
47+
final Configuration conf = new Configuration();
48+
final Callback[] callbacks = {new MyCallback()};
49+
50+
// without setting conf, expect UnsupportedCallbackException
51+
try {
52+
new SaslServerCallbackHandler(conf, String::toCharArray).handle(callbacks);
53+
Assert.fail("Expected UnsupportedCallbackException for " + Arrays.asList(callbacks));
54+
} catch (UnsupportedCallbackException e) {
55+
LOG.info("The failure is expected", e);
56+
}
57+
58+
// set conf and expect success
59+
conf.setClass(HdfsClientConfigKeys.DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY,
60+
MyCallbackHandler.class, CustomizedCallbackHandler.class);
61+
new SaslServerCallbackHandler(conf, String::toCharArray).handle(callbacks);
62+
}
63+
}

0 commit comments

Comments
 (0)