From a5eb5e961174dffb53f20633280eecddc02a4aca Mon Sep 17 00:00:00 2001
From: Tsz-Wo Nicholas Sze
Date: Sat, 20 Jul 2024 15:21:06 +0800
Subject: [PATCH] HDFS-17576. Support user defined auth Callback. (#6945)

---
 .../hdfs/client/HdfsClientConfigKeys.java           |  3 +
 .../sasl/DataTransferSaslUtil.java                  |  6 +-
 .../sasl/CustomizedCallbackHandler.java             | 39 ++++++++++++
 .../sasl/SaslDataTransferServer.java                | 36 ++++++++---
 .../src/main/resources/hdfs-default.xml             |  9 +++
 .../sasl/TestCustomizedCallbackHandler.java         | 63 +++++++++++++++++++
 6 files changed, 145 insertions(+), 11 deletions(-)
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/CustomizedCallbackHandler.java
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestCustomizedCallbackHandler.java

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java
index 2044530506..e951b1d7d7 100755
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsClientConfigKeys.java
@@ -236,6 +236,9 @@ public interface HdfsClientConfigKeys {
   String DFS_DATA_TRANSFER_SASL_PROPS_RESOLVER_CLASS_KEY =
       "dfs.data.transfer.saslproperties.resolver.class";
 
+  String DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY
+      = "dfs.data.transfer.sasl.CustomizedCallbackHandler.class";
+
   String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY =
       "dfs.encrypt.data.transfer.cipher.key.bitlength";
   int DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_DEFAULT = 128;
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
index 4749890ce1..e4ae936b4f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
@@ -102,9 +102,9 @@ public static void checkSaslComplete(SaslParticipant sasl,
     Set<String> requestedQop = ImmutableSet.copyOf(Arrays.asList(
         saslProps.get(Sasl.QOP).split(",")));
     String negotiatedQop = sasl.getNegotiatedQop();
-    LOG.debug("Verifying QOP, requested QOP = {}, negotiated QOP = {}",
-        requestedQop, negotiatedQop);
-    if (!requestedQop.contains(negotiatedQop)) {
+    LOG.debug("{}: Verifying QOP: requested = {}, negotiated = {}",
+        sasl, requestedQop, negotiatedQop);
+    if (negotiatedQop != null && !requestedQop.contains(negotiatedQop)) {
       throw new IOException(String.format("SASL handshake completed, but "
           + "channel does not have acceptable quality of protection, "
           + "requested = %s, negotiated = %s", requestedQop, negotiatedQop));
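For context, the new null check lets handshakes pass when no quality of protection
was negotiated at all: javax.security.sasl's getNegotiatedProperty(Sasl.QOP) may
return null when the property was not negotiated or is not applicable to the
mechanism. A minimal self-contained sketch of the revised check (the sample QOP
values are illustrative, not taken from the patch):

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class QopCheckSketch {
  /** Mirrors the patched check: a null negotiated QOP is now accepted. */
  static void checkQop(String requested, String negotiated) throws IOException {
    final Set<String> requestedQop = new HashSet<>(Arrays.asList(requested.split(",")));
    if (negotiated != null && !requestedQop.contains(negotiated)) {
      throw new IOException("SASL handshake completed, but channel does not have"
          + " acceptable quality of protection, requested = " + requestedQop
          + ", negotiated = " + negotiated);
    }
  }

  public static void main(String[] args) throws IOException {
    checkQop("auth,auth-conf", "auth-conf"); // passes: negotiated QOP was requested
    checkQop("auth-conf", null);             // passes after this patch; threw before
    try {
      checkQop("auth-conf", "auth");         // still rejected: QOP was not requested
    } catch (IOException expected) {
      System.out.println("rejected as expected: " + expected.getMessage());
    }
  }
}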
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/CustomizedCallbackHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/CustomizedCallbackHandler.java
new file mode 100644
index 0000000000..eff093490b
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/CustomizedCallbackHandler.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol.datatransfer.sasl;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import java.io.IOException;
+import java.util.List;
+
+/** For handling customized {@link Callback}. */
+public interface CustomizedCallbackHandler {
+  class DefaultHandler implements CustomizedCallbackHandler {
+    @Override
+    public void handleCallback(List<Callback> callbacks, String username, char[] password)
+        throws UnsupportedCallbackException {
+      if (!callbacks.isEmpty()) {
+        throw new UnsupportedCallbackException(callbacks.get(0));
+      }
+    }
+  }
+
+  void handleCallback(List<Callback> callbacks, String name, char[] password)
+      throws UnsupportedCallbackException, IOException;
+}
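To show how a security provider would plug into this interface, here is a
hypothetical implementation; MyProviderCallback, its verify() method, and the
package name are invented for illustration and are not part of this patch:

package com.example.security; // hypothetical package

import javax.security.auth.callback.Callback;
import javax.security.auth.callback.UnsupportedCallbackException;
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.CustomizedCallbackHandler;

/** A hypothetical provider-defined callback carrying extra credentials. */
class MyProviderCallback implements Callback {
  void verify(String name, char[] password) throws IOException {
    // Provider-specific verification logic would go here.
  }
}

/** Handles MyProviderCallback; rejects anything else, like DefaultHandler does. */
public class MyProviderCallbackHandler implements CustomizedCallbackHandler {
  @Override
  public void handleCallback(List<Callback> callbacks, String name, char[] password)
      throws UnsupportedCallbackException, IOException {
    for (final Callback callback : callbacks) {
      if (callback instanceof MyProviderCallback) {
        ((MyProviderCallback) callback).verify(name, password);
      } else {
        throw new UnsupportedCallbackException(callback);
      }
    }
  }
}

Note that the server instantiates the configured class reflectively (see the
clazz.newInstance() call in SaslDataTransferServer below), so an implementation
needs to be public with a no-argument constructor.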
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
index adf3a99634..ae79800b3e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
@@ -29,6 +29,7 @@
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -46,6 +47,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherOption;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.net.Peer;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
@@ -178,7 +180,7 @@ private IOStreamPair getEncryptedStreams(Peer peer,
           dnConf.getEncryptionAlgorithm());
     }
 
-    CallbackHandler callbackHandler = new SaslServerCallbackHandler(
+    final CallbackHandler callbackHandler = new SaslServerCallbackHandler(dnConf.getConf(),
       new PasswordFunction() {
         @Override
         public char[] apply(String userName) throws IOException {
@@ -195,7 +197,7 @@ public char[] apply(String userName) throws IOException {
    * logic.  It's similar to a Guava Function, but we need to let it throw
    * exceptions.
    */
-  private interface PasswordFunction {
+  interface PasswordFunction {
 
     /**
      * Returns the SASL password for the given user name.
@@ -210,18 +212,27 @@ private interface PasswordFunction {
   /**
    * Sets user name and password when asked by the server-side SASL object.
    */
-  private static final class SaslServerCallbackHandler
+  static final class SaslServerCallbackHandler
       implements CallbackHandler {
     private final PasswordFunction passwordFunction;
+    private final CustomizedCallbackHandler customizedCallbackHandler;
 
     /**
      * Creates a new SaslServerCallbackHandler.
      *
      * @param passwordFunction for determing the user's password
      */
-    public SaslServerCallbackHandler(PasswordFunction passwordFunction) {
+    SaslServerCallbackHandler(Configuration conf, PasswordFunction passwordFunction) {
       this.passwordFunction = passwordFunction;
+
+      final Class<? extends CustomizedCallbackHandler> clazz = conf.getClass(
+          HdfsClientConfigKeys.DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY,
+          CustomizedCallbackHandler.DefaultHandler.class, CustomizedCallbackHandler.class);
+      try {
+        this.customizedCallbackHandler = clazz.newInstance();
+      } catch (Exception e) {
+        throw new IllegalStateException("Failed to create a new instance of " + clazz, e);
+      }
     }
 
     @Override
@@ -230,6 +241,7 @@ public void handle(Callback[] callbacks) throws IOException,
       NameCallback nc = null;
       PasswordCallback pc = null;
       AuthorizeCallback ac = null;
+      List<Callback> unknownCallbacks = null;
       for (Callback callback : callbacks) {
         if (callback instanceof AuthorizeCallback) {
           ac = (AuthorizeCallback) callback;
@@ -240,8 +252,10 @@ public void handle(Callback[] callbacks) throws IOException,
         } else if (callback instanceof RealmCallback) {
           continue; // realm is ignored
         } else {
-          throw new UnsupportedCallbackException(callback,
-              "Unrecognized SASL Callback: " + callback);
+          if (unknownCallbacks == null) {
+            unknownCallbacks = new ArrayList<>();
+          }
+          unknownCallbacks.add(callback);
         }
       }
 
@@ -253,6 +267,12 @@ public void handle(Callback[] callbacks) throws IOException,
         ac.setAuthorized(true);
         ac.setAuthorizedID(ac.getAuthorizationID());
       }
+
+      if (unknownCallbacks != null) {
+        final String name = nc != null ? nc.getDefaultName() : null;
+        final char[] password = name != null ? passwordFunction.apply(name) : null;
+        customizedCallbackHandler.handleCallback(unknownCallbacks, name, password);
+      }
     }
   }
 
@@ -298,7 +318,7 @@ private IOStreamPair getSaslStreams(Peer peer, OutputStream underlyingOut,
     Map<String, String> saslProps = saslPropsResolver.getServerProperties(
         getPeerAddress(peer));
 
-    CallbackHandler callbackHandler = new SaslServerCallbackHandler(
+    final CallbackHandler callbackHandler = new SaslServerCallbackHandler(dnConf.getConf(),
       new PasswordFunction() {
         @Override
         public char[] apply(String userName) throws IOException {
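The revised handle() answers the standard callbacks inline and defers everything
else: unrecognized callbacks are collected and forwarded once, together with the
resolved user name and the password the server derived for it. A compact sketch
of that dispatch order (the anonymous stand-in callback and sample name are
illustrative only):

import javax.security.auth.callback.Callback;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import java.util.ArrayList;
import java.util.List;

public class DispatchSketch {
  public static void main(String[] args) {
    final Callback[] callbacks = {
        new NameCallback("Username: ", "hdfs"),
        new PasswordCallback("Password: ", false),
        new Callback() { } // stands in for a provider-defined callback
    };
    List<Callback> unknown = null;
    NameCallback nc = null;
    for (Callback c : callbacks) {
      if (c instanceof NameCallback) {
        nc = (NameCallback) c;
      } else if (c instanceof PasswordCallback) {
        // answered via the PasswordFunction in the real code
      } else {
        if (unknown == null) {
          unknown = new ArrayList<>();
        }
        unknown.add(c);
      }
    }
    // The batch below is what reaches CustomizedCallbackHandler.handleCallback.
    final String name = nc != null ? nc.getDefaultName() : null;
    System.out.println("forwarding " + (unknown == null ? 0 : unknown.size())
        + " callback(s) for user " + name);
  }
}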
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
index 94c3ea0cc9..d6fefa4e93 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
@@ -2641,6 +2641,15 @@
   </description>
 </property>
 
+<property>
+  <name>dfs.data.transfer.sasl.CustomizedCallbackHandler.class</name>
+  <value></value>
+  <description>
+    Some security provider may define a new javax.security.auth.callback.Callback.
+    This property allows users to configure a customized callback handler.
+  </description>
+</property>
+
 <property>
   <name>dfs.journalnode.rpc-address</name>
   <value>0.0.0.0:8485</value>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestCustomizedCallbackHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestCustomizedCallbackHandler.java
new file mode 100644
index 0000000000..88d1d66bc4
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestCustomizedCallbackHandler.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol.datatransfer.sasl;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
+import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferServer.SaslServerCallbackHandler;
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import java.util.Arrays;
+import java.util.List;
+
+public class TestCustomizedCallbackHandler {
+  public static final Logger LOG = LoggerFactory.getLogger(TestCustomizedCallbackHandler.class);
+
+  static class MyCallback implements Callback { }
+
+  static class MyCallbackHandler implements CustomizedCallbackHandler {
+    @Override
+    public void handleCallback(List<Callback> callbacks, String name, char[] password) {
+      LOG.info("{}: handling {} for {}", getClass().getSimpleName(), callbacks, name);
+    }
+  }
+
+  @Test
+  public void testCustomizedCallbackHandler() throws Exception {
+    final Configuration conf = new Configuration();
+    final Callback[] callbacks = {new MyCallback()};
+
+    // without setting conf, expect UnsupportedCallbackException
+    try {
+      new SaslServerCallbackHandler(conf, String::toCharArray).handle(callbacks);
+      Assert.fail("Expected UnsupportedCallbackException for " + Arrays.asList(callbacks));
+    } catch (UnsupportedCallbackException e) {
+      LOG.info("The failure is expected", e);
+    }
+
+    // set conf and expect success
+    conf.setClass(HdfsClientConfigKeys.DFS_DATA_TRANSFER_SASL_CUSTOMIZEDCALLBACKHANDLER_CLASS_KEY,
+        MyCallbackHandler.class, CustomizedCallbackHandler.class);
+    new SaslServerCallbackHandler(conf, String::toCharArray).handle(callbacks);
+  }
+}
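Putting it together, a deployment that ships a custom handler would point the new
property at its implementation, for example in hdfs-site.xml (the class name here
is the hypothetical example from above, not something this patch provides):

<property>
  <name>dfs.data.transfer.sasl.CustomizedCallbackHandler.class</name>
  <value>com.example.security.MyProviderCallbackHandler</value>
</property>

The class must be on the DataNode classpath; when the property is unset, the
DefaultHandler keeps the pre-patch behavior of rejecting any unrecognized
callback with UnsupportedCallbackException.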