Commit d7bbdd6

szetszwo authored and steveloughran committed

CDPD-73325: HDFS-17576. Support user defined auth Callback. (apache#6945)

1 parent 0fa0528 commit d7bbdd6

File tree

6 files changed: +145 additions, -11 deletions

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java

Lines changed: 3 additions & 0 deletions
@@ -236,6 +236,9 @@ public interface HdfsClientConfigKeys {
   String DFS_DATA_TRANSFER_SASL_PROPS_RESOLVER_CLASS_KEY =
       "dfs.data.transfer.saslproperties.resolver.class";
 
+  String DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY
+      = "dfs.data.transfer.sasl.CustomizedCallbackHandler.class";
+
   String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY =
       "dfs.encrypt.data.transfer.cipher.key.bitlength";
   int DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_DEFAULT = 128;
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java

Lines changed: 3 additions & 3 deletions
@@ -102,9 +102,9 @@ public static void checkSaslComplete(SaslParticipant sasl,
     Set<String> requestedQop = ImmutableSet.copyOf(Arrays.asList(
         saslProps.get(Sasl.QOP).split(",")));
     String negotiatedQop = sasl.getNegotiatedQop();
-    LOG.debug("Verifying QOP, requested QOP = {}, negotiated QOP = {}",
-        requestedQop, negotiatedQop);
-    if (!requestedQop.contains(negotiatedQop)) {
+    LOG.debug("{}: Verifying QOP: requested = {}, negotiated = {}",
+        sasl, requestedQop, negotiatedQop);
+    if (negotiatedQop != null && !requestedQop.contains(negotiatedQop)) {
       throw new IOException(String.format("SASL handshake completed, but " +
           "channel does not have acceptable quality of protection, " +
           "requested = %s, negotiated = %s", requestedQop, negotiatedQop));
…/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/CustomizedCallbackHandler.java (new file)

Lines changed: 39 additions & 0 deletions

@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol.datatransfer.sasl;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import java.io.IOException;
+import java.util.List;
+
+/** For handling customized {@link Callback}. */
+public interface CustomizedCallbackHandler {
+  class DefaultHandler implements CustomizedCallbackHandler {
+    @Override
+    public void handleCallback(List<Callback> callbacks, String username, char[] password)
+        throws UnsupportedCallbackException {
+      if (!callbacks.isEmpty()) {
+        throw new UnsupportedCallbackException(callbacks.get(0));
+      }
+    }
+  }
+
+  void handleCallback(List<Callback> callbacks, String name, char[] password)
+      throws UnsupportedCallbackException, IOException;
+}
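To illustrate the extension point: a security provider that defines its own Callback type could plug in an implementation along the lines of the sketch below. This is a hypothetical example, not part of the commit; OtpCallback, OtpCallbackHandler, and lookupOtp are invented names. Note that the configured class is created reflectively (via Class.newInstance() in SaslDataTransferServer), so it needs a public no-arg constructor.

package com.example.security; // hypothetical provider package, not part of this commit

import java.io.IOException;
import java.util.List;

import javax.security.auth.callback.Callback;
import javax.security.auth.callback.UnsupportedCallbackException;

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.CustomizedCallbackHandler;

/** A hypothetical callback that a custom SASL mechanism might issue. */
class OtpCallback implements Callback {
  private String otp;
  void setOtp(String otp) { this.otp = otp; }
  String getOtp() { return otp; }
}

/** Handles OtpCallback; any other unknown callback stays unsupported. */
public class OtpCallbackHandler implements CustomizedCallbackHandler {
  // A public no-arg constructor is required because the server
  // instantiates the configured class reflectively.
  public OtpCallbackHandler() { }

  @Override
  public void handleCallback(List<Callback> callbacks, String name, char[] password)
      throws UnsupportedCallbackException, IOException {
    for (Callback callback : callbacks) {
      if (callback instanceof OtpCallback) {
        // lookupOtp is a stand-in for the provider's real secret store.
        ((OtpCallback) callback).setOtp(lookupOtp(name));
      } else {
        throw new UnsupportedCallbackException(callback);
      }
    }
  }

  private String lookupOtp(String name) throws IOException {
    return "otp-for-" + name; // placeholder only
  }
}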

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java

Lines changed: 28 additions & 8 deletions
@@ -29,6 +29,7 @@
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
@@ -47,6 +48,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherOption;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.net.Peer;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
@@ -179,7 +181,7 @@ private IOStreamPair getEncryptedStreams(Peer peer,
           dnConf.getEncryptionAlgorithm());
     }
 
-    CallbackHandler callbackHandler = new SaslServerCallbackHandler(
+    final CallbackHandler callbackHandler = new SaslServerCallbackHandler(dnConf.getConf(),
         new PasswordFunction() {
           @Override
           public char[] apply(String userName) throws IOException {
@@ -196,7 +198,7 @@ public char[] apply(String userName) throws IOException {
    * logic.  It's similar to a Guava Function, but we need to let it throw
    * exceptions.
    */
-  private interface PasswordFunction {
+  interface PasswordFunction {
 
     /**
      * Returns the SASL password for the given user name.
@@ -211,18 +213,27 @@ private interface PasswordFunction {
   /**
    * Sets user name and password when asked by the server-side SASL object.
    */
-  private static final class SaslServerCallbackHandler
+  static final class SaslServerCallbackHandler
       implements CallbackHandler {
-
     private final PasswordFunction passwordFunction;
+    private final CustomizedCallbackHandler customizedCallbackHandler;
 
     /**
      * Creates a new SaslServerCallbackHandler.
      *
      * @param passwordFunction for determing the user's password
      */
-    public SaslServerCallbackHandler(PasswordFunction passwordFunction) {
+    SaslServerCallbackHandler(Configuration conf, PasswordFunction passwordFunction) {
       this.passwordFunction = passwordFunction;
+
+      final Class<? extends CustomizedCallbackHandler> clazz = conf.getClass(
+          HdfsClientConfigKeys.DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY,
+          CustomizedCallbackHandler.DefaultHandler.class, CustomizedCallbackHandler.class);
+      try {
+        this.customizedCallbackHandler = clazz.newInstance();
+      } catch (Exception e) {
+        throw new IllegalStateException("Failed to create a new instance of " + clazz, e);
+      }
     }
 
     @Override
@@ -231,6 +242,7 @@ public void handle(Callback[] callbacks) throws IOException,
       NameCallback nc = null;
      PasswordCallback pc = null;
       AuthorizeCallback ac = null;
+      List<Callback> unknownCallbacks = null;
       for (Callback callback : callbacks) {
         if (callback instanceof AuthorizeCallback) {
           ac = (AuthorizeCallback) callback;
@@ -241,8 +253,10 @@ public void handle(Callback[] callbacks) throws IOException,
         } else if (callback instanceof RealmCallback) {
           continue; // realm is ignored
         } else {
-          throw new UnsupportedCallbackException(callback,
-              "Unrecognized SASL Callback: " + callback);
+          if (unknownCallbacks == null) {
+            unknownCallbacks = new ArrayList<>();
+          }
+          unknownCallbacks.add(callback);
         }
       }
 
@@ -254,6 +268,12 @@ public void handle(Callback[] callbacks) throws IOException,
         ac.setAuthorized(true);
         ac.setAuthorizedID(ac.getAuthorizationID());
       }
+
+      if (unknownCallbacks != null) {
+        final String name = nc != null ? nc.getDefaultName() : null;
+        final char[] password = name != null ? passwordFunction.apply(name) : null;
+        customizedCallbackHandler.handleCallback(unknownCallbacks, name, password);
+      }
     }
   }
 
@@ -299,7 +319,7 @@ private IOStreamPair getSaslStreams(Peer peer, OutputStream underlyingOut,
     Map<String, String> saslProps = saslPropsResolver.getServerProperties(
         getPeerAddress(peer));
 
-    CallbackHandler callbackHandler = new SaslServerCallbackHandler(
+    final CallbackHandler callbackHandler = new SaslServerCallbackHandler(dnConf.getConf(),
        new PasswordFunction() {
          @Override
          public char[] apply(String userName) throws IOException {

hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml

Lines changed: 9 additions & 0 deletions
@@ -2634,6 +2634,15 @@
   </description>
 </property>
 
+<property>
+  <name>dfs.data.transfer.sasl.CustomizedCallbackHandler.class</name>
+  <value></value>
+  <description>
+    Some security provider may define a new javax.security.auth.callback.Callback.
+    This property allows users to configure a customized callback handler.
+  </description>
+</property>
+
 <property>
   <name>dfs.journalnode.rpc-address</name>
   <value>0.0.0.0:8485</value>
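Besides hdfs-site.xml, the handler can also be selected programmatically, as the new test below does. A minimal sketch, assuming the hypothetical OtpCallbackHandler from the earlier sketch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.CustomizedCallbackHandler;

public class RegisterCallbackHandlerExample {
  public static void main(String[] args) {
    final Configuration conf = new Configuration();
    // Equivalent to setting dfs.data.transfer.sasl.CustomizedCallbackHandler.class
    // in hdfs-site.xml; OtpCallbackHandler is the hypothetical class sketched above.
    conf.setClass(
        HdfsClientConfigKeys.DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY,
        com.example.security.OtpCallbackHandler.class,
        CustomizedCallbackHandler.class);
  }
}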
…/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestCustomizedCallbackHandler.java (new file)

Lines changed: 63 additions & 0 deletions

@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol.datatransfer.sasl;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
+import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferServer.SaslServerCallbackHandler;
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import java.util.Arrays;
+import java.util.List;
+
+public class TestCustomizedCallbackHandler {
+  public static final Logger LOG = LoggerFactory.getLogger(TestCustomizedCallbackHandler.class);
+
+  static class MyCallback implements Callback { }
+
+  static class MyCallbackHandler implements CustomizedCallbackHandler {
+    @Override
+    public void handleCallback(List<Callback> callbacks, String name, char[] password) {
+      LOG.info("{}: handling {} for {}", getClass().getSimpleName(), callbacks, name);
+    }
+  }
+
+  @Test
+  public void testCustomizedCallbackHandler() throws Exception {
+    final Configuration conf = new Configuration();
+    final Callback[] callbacks = {new MyCallback()};
+
+    // without setting conf, expect UnsupportedCallbackException
+    try {
+      new SaslServerCallbackHandler(conf, String::toCharArray).handle(callbacks);
+      Assert.fail("Expected UnsupportedCallbackException for " + Arrays.asList(callbacks));
+    } catch (UnsupportedCallbackException e) {
+      LOG.info("The failure is expected", e);
+    }
+
+    // set conf and expect success
+    conf.setClass(HdfsClientConfigKeys.DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY,
+        MyCallbackHandler.class, CustomizedCallbackHandler.class);
+    new SaslServerCallbackHandler(conf, String::toCharArray).handle(callbacks);
+  }
+}
