diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java index 3742eb8118a1..60f4b7932bfc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java @@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; +import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders; import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.PoolMap; @@ -116,6 +117,8 @@ public abstract class AbstractRpcClient implements RpcC protected final UserProvider userProvider; protected final CellBlockBuilder cellBlockBuilder; + protected final SaslClientAuthenticationProviders providers; + protected final int minIdleTimeBeforeClose; // if the connection is idle for more than this // time (in ms), it will be closed at any moment. protected final int maxRetries; // the max. no. 
of retries for socket connections @@ -184,6 +187,8 @@ public AbstractRpcClient(Configuration conf, String clusterId, SocketAddress loc conf.getInt(HConstants.HBASE_CLIENT_PERSERVER_REQUESTS_THRESHOLD, HConstants.DEFAULT_HBASE_CLIENT_PERSERVER_REQUESTS_THRESHOLD); + this.providers = new SaslClientAuthenticationProviders(conf); + this.connections = new PoolMap<>(getPoolType(conf), getPoolSize(conf)); this.cleanupIdleConnectionTask = IDLE_CONN_SWEEPER.scheduleAtFixedRate(new Runnable() { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java index fe46c97a6539..c52a00cd81e3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java @@ -220,8 +220,9 @@ public void cleanup(IOException e) { BlockingRpcConnection(BlockingRpcClient rpcClient, ConnectionId remoteId) throws IOException { super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId, rpcClient.clusterId, - rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor, - rpcClient.cellBlockBuilder, rpcClient.metrics, rpcClient.connectionAttributes); + rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.providers, rpcClient.codec, + rpcClient.compressor, rpcClient.cellBlockBuilder, rpcClient.metrics, + rpcClient.connectionAttributes); this.rpcClient = rpcClient; this.connectionHeaderPreamble = getConnectionHeaderPreamble(); ConnectionHeader header = getConnectionHeader(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java index b7856c089133..d65c19e34edd 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java +++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcConnection.java @@ -106,8 +106,9 @@ class NettyRpcConnection extends RpcConnection { NettyRpcConnection(NettyRpcClient rpcClient, ConnectionId remoteId) throws IOException { super(rpcClient.conf, AbstractRpcClient.WHEEL_TIMER, remoteId, rpcClient.clusterId, - rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.codec, rpcClient.compressor, - rpcClient.cellBlockBuilder, rpcClient.metrics, rpcClient.connectionAttributes); + rpcClient.userProvider.isHBaseSecurityEnabled(), rpcClient.providers, rpcClient.codec, + rpcClient.compressor, rpcClient.cellBlockBuilder, rpcClient.metrics, + rpcClient.connectionAttributes); this.rpcClient = rpcClient; this.eventLoop = rpcClient.group.next(); byte[] connectionHeaderPreamble = getConnectionHeaderPreamble(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java index 8017e99ec4ff..2ec5a7ee1a39 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java @@ -119,9 +119,9 @@ abstract class RpcConnection { private String lastSucceededServerPrincipal; protected RpcConnection(Configuration conf, HashedWheelTimer timeoutTimer, ConnectionId remoteId, - String clusterId, boolean isSecurityEnabled, Codec codec, CompressionCodec compressor, - CellBlockBuilder cellBlockBuilder, MetricsConnection metrics, - Map connectionAttributes) throws IOException { + String clusterId, boolean isSecurityEnabled, SaslClientAuthenticationProviders providers, + Codec codec, CompressionCodec compressor, CellBlockBuilder cellBlockBuilder, + MetricsConnection metrics, Map connectionAttributes) throws IOException { this.timeoutTimer = timeoutTimer; this.codec = codec; this.compressor = compressor; @@ -134,8 +134,6 @@ protected RpcConnection(Configuration conf, HashedWheelTimer 
timeoutTimer, Conne this.useSasl = isSecurityEnabled; // Choose the correct Token and AuthenticationProvider for this client to use - SaslClientAuthenticationProviders providers = - SaslClientAuthenticationProviders.getInstance(conf); Pair> pair; if (useSasl && securityInfo != null) { pair = providers.selectProvider(clusterId, ticket); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/AuthenticationProviderSelector.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/AuthenticationProviderSelector.java index cdd1fdb381f6..4653217b90d2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/AuthenticationProviderSelector.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/AuthenticationProviderSelector.java @@ -32,9 +32,10 @@ public interface AuthenticationProviderSelector { /** - * Initializes the implementation with configuration and a set of providers available. This method - * should be called exactly once per implementation prior to calling - * {@link #selectProvider(String, User)}. + * Initializes the implementation with configuration and a set of providers available. + *

+ * This method is called once upon construction, before {@link #selectProvider(String, User)} is + * invoked. */ void configure(Configuration conf, Collection availableProviders); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProviders.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProviders.java index a78ff3386a46..a0ad1b405d0b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProviders.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/provider/SaslClientAuthenticationProviders.java @@ -48,16 +48,36 @@ public final class SaslClientAuthenticationProviders { public static final String SELECTOR_KEY = "hbase.client.sasl.provider.class"; public static final String EXTRA_PROVIDERS_KEY = "hbase.client.sasl.provider.extras"; - private static final AtomicReference providersRef = + private static final AtomicReference PROVIDER_REF = new AtomicReference<>(); private final Collection providers; private final AuthenticationProviderSelector selector; - private SaslClientAuthenticationProviders(Collection providers, - AuthenticationProviderSelector selector) { - this.providers = providers; - this.selector = selector; + /** + * Creates a new instance of SaslClientAuthenticationProviders. 
+ * @param conf the configuration to use for loading providers and selector + */ + public SaslClientAuthenticationProviders(Configuration conf) { + ServiceLoader loader = + ServiceLoader.load(SaslClientAuthenticationProvider.class, + SaslClientAuthenticationProviders.class.getClassLoader()); + HashMap providerMap = new HashMap<>(); + for (SaslClientAuthenticationProvider provider : loader) { + addProviderIfNotExists(provider, providerMap); + } + + addExplicitProviders(conf, providerMap); + + providers = Collections.unmodifiableCollection(providerMap.values()); + + if (LOG.isTraceEnabled()) { + String loadedProviders = providers.stream().map((provider) -> provider.getClass().getName()) + .collect(Collectors.joining(", ")); + LOG.trace("Found SaslClientAuthenticationProviders {}", loadedProviders); + } + + selector = instantiateSelector(conf, providers); } /** @@ -69,12 +89,16 @@ public int getNumRegisteredProviders() { /** * Returns a singleton instance of {@link SaslClientAuthenticationProviders}. + * @deprecated Since 2.5.14 and 2.6.4, will be removed in newer minor release lines. This class + * should not be singleton, please do not use it any more. see HBASE-29144 for more + * details. */ + @Deprecated public static synchronized SaslClientAuthenticationProviders getInstance(Configuration conf) { - SaslClientAuthenticationProviders providers = providersRef.get(); + SaslClientAuthenticationProviders providers = PROVIDER_REF.get(); if (providers == null) { - providers = instantiate(conf); - providersRef.set(providers); + providers = new SaslClientAuthenticationProviders(conf); + PROVIDER_REF.set(providers); } return providers; @@ -82,9 +106,13 @@ public static synchronized SaslClientAuthenticationProviders getInstance(Configu /** * Removes the cached singleton instance of {@link SaslClientAuthenticationProviders}. + * @deprecated Since 2.5.14 and 2.6.4, will be removed in newer minor release lines. 
This class + * should not be singleton, please do not use it any more. see HBASE-29144 for more + * details. */ + @Deprecated public static synchronized void reset() { - providersRef.set(null); + PROVIDER_REF.set(null); } /** @@ -93,7 +121,7 @@ public static synchronized void reset() { */ static void addProviderIfNotExists(SaslClientAuthenticationProvider provider, HashMap providers) { - Byte code = provider.getSaslAuthMethod().getCode(); + byte code = provider.getSaslAuthMethod().getCode(); SaslClientAuthenticationProvider existingProvider = providers.get(code); if (existingProvider != null) { throw new RuntimeException("Already registered authentication provider with " + code + " " @@ -105,7 +133,7 @@ static void addProviderIfNotExists(SaslClientAuthenticationProvider provider, /** * Instantiates the ProviderSelector implementation from the provided configuration. */ - static AuthenticationProviderSelector instantiateSelector(Configuration conf, + private static AuthenticationProviderSelector instantiateSelector(Configuration conf, Collection providers) { Class clz = conf.getClass(SELECTOR_KEY, BuiltInProviderSelector.class, AuthenticationProviderSelector.class); @@ -161,34 +189,6 @@ static void addExplicitProviders(Configuration conf, } } - /** - * Instantiates all client authentication providers and returns an instance of - * {@link SaslClientAuthenticationProviders}. 
- */ - static SaslClientAuthenticationProviders instantiate(Configuration conf) { - ServiceLoader loader = - ServiceLoader.load(SaslClientAuthenticationProvider.class, - SaslClientAuthenticationProviders.class.getClassLoader()); - HashMap providerMap = new HashMap<>(); - for (SaslClientAuthenticationProvider provider : loader) { - addProviderIfNotExists(provider, providerMap); - } - - addExplicitProviders(conf, providerMap); - - Collection providers = - Collections.unmodifiableCollection(providerMap.values()); - - if (LOG.isTraceEnabled()) { - String loadedProviders = providers.stream().map((provider) -> provider.getClass().getName()) - .collect(Collectors.joining(", ")); - LOG.trace("Found SaslClientAuthenticationProviders {}", loadedProviders); - } - - AuthenticationProviderSelector selector = instantiateSelector(conf, providers); - return new SaslClientAuthenticationProviders(providers, selector); - } - /** * Returns the provider and token pair for SIMPLE authentication. This method is a "hack" while * SIMPLE authentication for HBase does not flow through the SASL codepath. 
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslClientAuthenticationProviders.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslClientAuthenticationProviders.java index 029c880600b4..9b4fdef06133 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslClientAuthenticationProviders.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslClientAuthenticationProviders.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.security.provider; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertSame; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.io.IOException; import java.net.InetAddress; @@ -27,7 +27,6 @@ import java.util.Map; import javax.security.sasl.SaslClient; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.security.SecurityInfo; import org.apache.hadoop.hbase.security.User; @@ -36,19 +35,15 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation; -@Category({ SmallTests.class, SecurityTests.class }) +@Tag(SmallTests.TAG) +@Tag(SecurityTests.TAG) public class TestSaslClientAuthenticationProviders { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - 
HBaseClassTestRule.forClass(TestSaslClientAuthenticationProviders.class); - @Test public void testCannotAddTheSameProviderTwice() { HashMap registeredProviders = new HashMap<>(); @@ -58,38 +53,19 @@ public void testCannotAddTheSameProviderTwice() { SaslClientAuthenticationProviders.addProviderIfNotExists(p1, registeredProviders); assertEquals(1, registeredProviders.size()); - try { - SaslClientAuthenticationProviders.addProviderIfNotExists(p2, registeredProviders); - } catch (RuntimeException e) { - } + assertThrows(RuntimeException.class, + () -> SaslClientAuthenticationProviders.addProviderIfNotExists(p2, registeredProviders)); - assertSame("Expected the original provider to be present", p1, - registeredProviders.entrySet().iterator().next().getValue()); + assertSame(p1, registeredProviders.entrySet().iterator().next().getValue(), + "Expected the original provider to be present"); } @Test - public void testInstanceIsCached() { - Configuration conf = HBaseConfiguration.create(); - SaslClientAuthenticationProviders providers1 = - SaslClientAuthenticationProviders.getInstance(conf); - SaslClientAuthenticationProviders providers2 = - SaslClientAuthenticationProviders.getInstance(conf); - assertSame(providers1, providers2); - - SaslClientAuthenticationProviders.reset(); - - SaslClientAuthenticationProviders providers3 = - SaslClientAuthenticationProviders.getInstance(conf); - assertNotSame(providers1, providers3); - assertEquals(providers1.getNumRegisteredProviders(), providers3.getNumRegisteredProviders()); - } - - @Test(expected = RuntimeException.class) public void testDifferentConflictingImplementationsFail() { Configuration conf = HBaseConfiguration.create(); conf.setStrings(SaslClientAuthenticationProviders.EXTRA_PROVIDERS_KEY, ConflictingProvider1.class.getName(), ConflictingProvider2.class.getName()); - SaslClientAuthenticationProviders.getInstance(conf); + assertThrows(RuntimeException.class, () -> new SaslClientAuthenticationProviders(conf)); } static class 
ConflictingProvider1 implements SaslClientAuthenticationProvider { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java index de17960d151e..1df5b22a7029 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java @@ -23,6 +23,8 @@ import org.junit.jupiter.api.extension.ParameterResolver; import org.junit.jupiter.params.provider.Arguments; +import org.apache.hbase.thirdparty.com.google.common.primitives.Primitives; + /** * @see HBaseParameterizedTestTemplate */ @@ -38,8 +40,20 @@ public class HBaseParameterizedParameterResolver implements ParameterResolver { public boolean supportsParameter(ParameterContext pc, ExtensionContext ec) throws ParameterResolutionException { int index = pc.getIndex(); - return index < values.length - && pc.getParameter().getType().isAssignableFrom(values[index].getClass()); + if (index >= values.length) { + return false; + } + Object value = values[index]; + Class expectedType = pc.getParameter().getType(); + if (expectedType.isPrimitive()) { + // primitive type can not accept null value + if (value == null) { + return false; + } + // test with wrapper type, otherwise it will always return false + return Primitives.wrap(expectedType).isAssignableFrom(value.getClass()); + } + return expectedType.isAssignableFrom(value.getClass()); } @Override diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java index f661025ac062..9ac11c5849a8 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java +++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceUtil.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders; import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -43,7 +42,6 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.After; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -59,11 +57,6 @@ public class TestTableMapReduceUtil { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestTableMapReduceUtil.class); - @After - public void after() { - SaslClientAuthenticationProviders.reset(); - } - /* * initTableSnapshotMapperJob is tested in {@link TestTableSnapshotInputFormat} because the method * depends on an online cluster. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index fc6d9ea7611a..c00717c69c8c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; +import org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders; import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.GsonUtil; @@ -97,6 +98,7 @@ public abstract class RpcServer implements RpcServerInterface, ConfigurationObse private final boolean authorize; private volatile boolean isOnlineLogProviderEnabled; protected boolean isSecurityEnabled; + protected final SaslServerAuthenticationProviders saslProviders; public static final byte CURRENT_VERSION = 0; @@ -309,6 +311,7 @@ public RpcServer(final Server server, final String name, saslProps = Collections.emptyMap(); serverPrincipal = HConstants.EMPTY_STRING; } + this.saslProviders = new SaslServerAuthenticationProviders(conf); this.isOnlineLogProviderEnabled = getIsOnlineLogProviderEnabled(conf); this.scheduler = scheduler; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java index bca27f616450..468c0a62e7de 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java @@ -55,7 +55,6 @@ import org.apache.hadoop.hbase.security.SaslUtil; import org.apache.hadoop.hbase.security.User; import 
org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProvider; -import org.apache.hadoop.hbase.security.provider.SaslServerAuthenticationProviders; import org.apache.hadoop.hbase.security.provider.SimpleSaslServerAuthenticationProvider; import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.hadoop.hbase.util.ByteBufferUtils; @@ -137,13 +136,11 @@ abstract class ServerRpcConnection implements Closeable { protected User user = null; protected UserGroupInformation ugi = null; - protected SaslServerAuthenticationProviders saslProviders = null; protected X509Certificate[] clientCertificateChain = null; public ServerRpcConnection(RpcServer rpcServer) { this.rpcServer = rpcServer; this.callCleanup = null; - this.saslProviders = SaslServerAuthenticationProviders.getInstance(rpcServer.getConf()); } @Override @@ -781,7 +778,7 @@ protected final PreambleResponse processPreamble(ByteBuffer preambleBuffer) thro return PreambleResponse.CLOSE; } - this.provider = this.saslProviders.selectProvider(authByte); + this.provider = rpcServer.saslProviders.selectProvider(authByte); if (this.provider == null) { String msg = getFatalConnectionString(version, authByte); doBadPreambleHandling(msg, new BadAuthException(msg)); @@ -801,7 +798,7 @@ protected final PreambleResponse processPreamble(ByteBuffer preambleBuffer) thro if (!this.rpcServer.isSecurityEnabled && !isSimpleAuthentication()) { doRawSaslReply(SaslStatus.SUCCESS, new IntWritable(SaslUtil.SWITCH_TO_SIMPLE_AUTH), null, null); - provider = saslProviders.getSimpleProvider(); + provider = rpcServer.saslProviders.getSimpleProvider(); // client has already sent the initial Sasl message and we // should ignore it. Both client and server should fall back // to simple auth from now on. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/provider/SaslServerAuthenticationProviders.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/provider/SaslServerAuthenticationProviders.java index 17480cb4659b..385b2fb79f4e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/provider/SaslServerAuthenticationProviders.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/provider/SaslServerAuthenticationProviders.java @@ -19,7 +19,9 @@ import java.io.IOException; import java.lang.reflect.InvocationTargetException; +import java.util.Collections; import java.util.HashMap; +import java.util.Map; import java.util.Optional; import java.util.ServiceLoader; import java.util.concurrent.atomic.AtomicReference; @@ -35,14 +37,46 @@ public final class SaslServerAuthenticationProviders { LoggerFactory.getLogger(SaslClientAuthenticationProviders.class); public static final String EXTRA_PROVIDERS_KEY = "hbase.server.sasl.provider.extras"; - private static final AtomicReference holder = + + private static final AtomicReference HOLDER = new AtomicReference<>(); - private final HashMap providers; + private final Map providers; - private SaslServerAuthenticationProviders(Configuration conf, - HashMap providers) { - this.providers = providers; + /** + * Creates a new instance of SaslServerAuthenticationProviders. 
+ * @param conf the configuration to use for loading providers + */ + public SaslServerAuthenticationProviders(Configuration conf) { + ServiceLoader loader = + ServiceLoader.load(SaslServerAuthenticationProvider.class); + HashMap providerMap = new HashMap<>(); + for (SaslServerAuthenticationProvider provider : loader) { + addProviderIfNotExists(provider, providerMap); + } + + addExtraProviders(conf, providerMap); + + if (LOG.isTraceEnabled()) { + String loadedProviders = providerMap.values().stream() + .map((provider) -> provider.getClass().getName()).collect(Collectors.joining(", ")); + if (loadedProviders.isEmpty()) { + loadedProviders = "None!"; + } + LOG.trace("Found SaslServerAuthenticationProviders {}", loadedProviders); + } + + // Initialize the providers once, before we get into the RPC path. + providerMap.forEach((b, provider) -> { + try { + // Give them a copy, just to make sure there is no funny-business going on. + provider.init(new Configuration(conf)); + } catch (IOException e) { + LOG.error("Failed to initialize {}", provider.getClass(), e); + throw new RuntimeException("Failed to initialize " + provider.getClass().getName(), e); + } + }); + this.providers = Collections.unmodifiableMap(providerMap); } /** @@ -54,19 +88,23 @@ public int getNumRegisteredProviders() { /** * Returns a singleton instance of {@link SaslServerAuthenticationProviders}. + * @deprecated Since 2.5.14 and 2.6.4, will be removed in newer minor release lines. This class + * should not be singleton, please do not use it any more. see HBASE-29144 for more + * details. 
*/ + @Deprecated public static SaslServerAuthenticationProviders getInstance(Configuration conf) { - SaslServerAuthenticationProviders providers = holder.get(); + SaslServerAuthenticationProviders providers = HOLDER.get(); if (null == providers) { - synchronized (holder) { + synchronized (HOLDER) { // Someone else beat us here - providers = holder.get(); + providers = HOLDER.get(); if (null != providers) { return providers; } - providers = createProviders(conf); - holder.set(providers); + providers = new SaslServerAuthenticationProviders(conf); + HOLDER.set(providers); } } return providers; @@ -74,10 +112,14 @@ public static SaslServerAuthenticationProviders getInstance(Configuration conf) /** * Removes the cached singleton instance of {@link SaslServerAuthenticationProviders}. + * @deprecated Since 2.5.14 and 2.6.4, will be removed in newer minor release lines. This class + * should not be singleton, please do not use it any more. see HBASE-29144 for more + * details. */ + @Deprecated public static void reset() { - synchronized (holder) { - holder.set(null); + synchronized (HOLDER) { + HOLDER.set(null); } } @@ -129,43 +171,6 @@ static void addExtraProviders(Configuration conf, } } - /** - * Loads server authentication providers from the classpath and configuration, and then creates - * the SaslServerAuthenticationProviders instance. 
- */ - static SaslServerAuthenticationProviders createProviders(Configuration conf) { - ServiceLoader loader = - ServiceLoader.load(SaslServerAuthenticationProvider.class); - HashMap providers = new HashMap<>(); - for (SaslServerAuthenticationProvider provider : loader) { - addProviderIfNotExists(provider, providers); - } - - addExtraProviders(conf, providers); - - if (LOG.isTraceEnabled()) { - String loadedProviders = providers.values().stream() - .map((provider) -> provider.getClass().getName()).collect(Collectors.joining(", ")); - if (loadedProviders.isEmpty()) { - loadedProviders = "None!"; - } - LOG.trace("Found SaslServerAuthenticationProviders {}", loadedProviders); - } - - // Initialize the providers once, before we get into the RPC path. - providers.forEach((b, provider) -> { - try { - // Give them a copy, just to make sure there is no funny-business going on. - provider.init(new Configuration(conf)); - } catch (IOException e) { - LOG.error("Failed to initialize {}", provider.getClass(), e); - throw new RuntimeException("Failed to initialize " + provider.getClass().getName(), e); - } - }); - - return new SaslServerAuthenticationProviders(conf, providers); - } - /** * Selects the appropriate SaslServerAuthenticationProvider from those available. If there is no * matching provider for the given {@code authByte}, this method will return null. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/BasicReadWriteWithDifferentConnectionRegistriesTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/BasicReadWriteWithDifferentConnectionRegistriesTestBase.java new file mode 100644 index 000000000000..4d8d91737eae --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/BasicReadWriteWithDifferentConnectionRegistriesTestBase.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.client; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertFalse; + +import java.io.IOException; +import java.net.URI; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.TestInfo; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EnumSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public abstract class BasicReadWriteWithDifferentConnectionRegistriesTestBase { + + private static final Logger LOG = + LoggerFactory.getLogger(TestBasicReadWriteWithDifferentConnectionRegistries.class); + + protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); + + public enum RegistryImpl { + ZK, + RPC, + ZK_URI, + RPC_URI + } + + private TableName tableName; + + private byte[] FAMILY = Bytes.toBytes("family"); + + private Connection conn; + + protected abstract Configuration getConf(); + + protected abstract Connection createConn(URI uri) throws 
IOException; + + private void init(RegistryImpl impl) throws Exception { + switch (impl) { + case ZK: { + Configuration conf = getConf(); + conf.setClass(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY, + ZKConnectionRegistry.class, ConnectionRegistry.class); + String quorum = UTIL.getZkCluster().getAddress().toString(); + String path = UTIL.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT); + conf.set(HConstants.CLIENT_ZOOKEEPER_QUORUM, quorum); + conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, path); + LOG.info("connect to cluster through zk quorum={} and parent={}", quorum, path); + conn = ConnectionFactory.createConnection(conf); + break; + } + case RPC: { + Configuration conf = getConf(); + conf.setClass(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY, + RpcConnectionRegistry.class, ConnectionRegistry.class); + String bootstrapServers = + UTIL.getMiniHBaseCluster().getMaster().getServerName().getAddress().toString(); + conf.set(RpcConnectionRegistry.BOOTSTRAP_NODES, bootstrapServers); + LOG.info("connect to cluster through rpc bootstrap servers={}", bootstrapServers); + conn = ConnectionFactory.createConnection(conf); + break; + } + case ZK_URI: { + String quorum = UTIL.getZkCluster().getAddress().toString(); + String path = UTIL.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT); + URI connectionUri = new URI("hbase+zk://" + quorum + path); + LOG.info("connect to cluster through connection url: {}", connectionUri); + conn = createConn(connectionUri); + break; + } + case RPC_URI: { + URI connectionUri = new URI("hbase+rpc://" + + UTIL.getMiniHBaseCluster().getMaster().getServerName().getAddress().toString()); + LOG.info("connect to cluster through connection url: {}", connectionUri); + conn = createConn(connectionUri); + break; + } + default: + throw new IllegalArgumentException("Unknown impl: " + impl); + } + try (Admin admin = conn.getAdmin()) { + admin.createTable(TableDescriptorBuilder.newBuilder(tableName) + 
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build()); + } + } + + @BeforeEach + public void setUp(TestInfo testInfo) { + String displayName = testInfo.getTestMethod().get().getName() + testInfo.getDisplayName(); + tableName = TableName.valueOf(displayName.replaceAll("[ \\[\\]]", "_")); + } + + @AfterEach + public void tearDown() throws Exception { + try (Admin admin = conn.getAdmin()) { + admin.disableTable(tableName); + admin.deleteTable(tableName); + } + conn.close(); + } + + @ParameterizedTest + @EnumSource(value = RegistryImpl.class) + public void testReadWrite(RegistryImpl impl) throws Exception { + init(impl); + byte[] row = Bytes.toBytes("row"); + byte[] qualifier = Bytes.toBytes("qualifier"); + byte[] value = Bytes.toBytes("value"); + try (Table table = conn.getTable(tableName)) { + Put put = new Put(row).addColumn(FAMILY, qualifier, value); + table.put(put); + Result result = table.get(new Get(row)); + assertArrayEquals(value, result.getValue(FAMILY, qualifier)); + table.delete(new Delete(row)); + assertFalse(table.exists(new Get(row))); + } + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java index cf0d1d2d11f6..d431c50b003c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBasicReadWriteWithDifferentConnectionRegistries.java @@ -17,161 +17,43 @@ */ package org.apache.hadoop.hbase.client; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertFalse; - +import java.io.IOException; import java.net.URI; -import java.util.ArrayList; -import java.util.List; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HBaseTestingUtility; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.TableNameTestRule; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; /** * Test basic read write operation with different {@link ConnectionRegistry} implementations. 
*/ -@RunWith(Parameterized.class) -@Category({ MediumTests.class, ClientTests.class }) -public class TestBasicReadWriteWithDifferentConnectionRegistries { - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBasicReadWriteWithDifferentConnectionRegistries.class); - - private static final Logger LOG = - LoggerFactory.getLogger(TestBasicReadWriteWithDifferentConnectionRegistries.class); - - private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); - - public enum RegistryImpl { - ZK, - RPC, - ZK_URI, - RPC_URI - } - - @Parameter - public RegistryImpl impl; +@Tag(MediumTests.TAG) +@Tag(ClientTests.TAG) +public class TestBasicReadWriteWithDifferentConnectionRegistries + extends BasicReadWriteWithDifferentConnectionRegistriesTestBase { - @Rule - public final TableNameTestRule name = new TableNameTestRule(); - - private byte[] FAMILY = Bytes.toBytes("family"); - - private Connection conn; - - @Parameters(name = "{index}: impl={0}") - public static List data() { - List data = new ArrayList(); - for (RegistryImpl impl : RegistryImpl.values()) { - data.add(new Object[] { impl }); - } - return data; - } - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { UTIL.shutdownMiniCluster(); } - @Before - public void setUp() throws Exception { - switch (impl) { - case ZK: { - Configuration conf = HBaseConfiguration.create(); - conf.setClass(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY, - ZKConnectionRegistry.class, ConnectionRegistry.class); - String quorum = UTIL.getZkCluster().getAddress().toString(); - String path = UTIL.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT); - conf.set(HConstants.CLIENT_ZOOKEEPER_QUORUM, quorum); - conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, path); - LOG.info("connect to cluster through zk quorum={} and parent={}", quorum, 
path); - conn = ConnectionFactory.createConnection(conf); - break; - } - case RPC: { - Configuration conf = HBaseConfiguration.create(); - conf.setClass(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY, - RpcConnectionRegistry.class, ConnectionRegistry.class); - String bootstrapServers = - UTIL.getMiniHBaseCluster().getMaster().getServerName().getAddress().toString(); - conf.set(RpcConnectionRegistry.BOOTSTRAP_NODES, bootstrapServers); - LOG.info("connect to cluster through rpc bootstrap servers={}", bootstrapServers); - conn = ConnectionFactory.createConnection(conf); - break; - } - case ZK_URI: { - String quorum = UTIL.getZkCluster().getAddress().toString(); - String path = UTIL.getConfiguration().get(HConstants.ZOOKEEPER_ZNODE_PARENT); - URI connectionUri = new URI("hbase+zk://" + quorum + path); - LOG.info("connect to cluster through connection url: {}", connectionUri); - conn = ConnectionFactory.createConnection(connectionUri); - break; - } - case RPC_URI: { - URI connectionUri = new URI("hbase+rpc://" - + UTIL.getMiniHBaseCluster().getMaster().getServerName().getAddress().toString()); - LOG.info("connect to cluster through connection url: {}", connectionUri); - conn = ConnectionFactory.createConnection(connectionUri); - break; - } - default: - throw new IllegalArgumentException("Unknown impl: " + impl); - } - try (Admin admin = conn.getAdmin()) { - admin.createTable(TableDescriptorBuilder.newBuilder(name.getTableName()) - .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build()); - } - } - - @After - public void tearDown() throws Exception { - TableName tableName = name.getTableName(); - try (Admin admin = conn.getAdmin()) { - admin.disableTable(tableName); - admin.deleteTable(tableName); - } - conn.close(); + // here we want to test connecting to hbase cluster with connection registry only configuration or + // only connection URI, so in the below methods we just use an empty configuration + @Override + protected Configuration getConf() { + 
return HBaseConfiguration.create(); } - @Test - public void testReadWrite() throws Exception { - byte[] row = Bytes.toBytes("row"); - byte[] qualifier = Bytes.toBytes("qualifier"); - byte[] value = Bytes.toBytes("value"); - try (Table table = conn.getTable(name.getTableName())) { - Put put = new Put(row).addColumn(FAMILY, qualifier, value); - table.put(put); - Result result = table.get(new Get(row)); - assertArrayEquals(value, result.getValue(FAMILY, qualifier)); - table.delete(new Delete(row)); - assertFalse(table.exists(new Get(row))); - } + @Override + protected Connection createConn(URI uri) throws IOException { + return ConnectionFactory.createConnection(uri); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSecureBasicReadWriteWithDifferentConnectionRegistries.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSecureBasicReadWriteWithDifferentConnectionRegistries.java new file mode 100644 index 000000000000..a1b31f0cc361 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSecureBasicReadWriteWithDifferentConnectionRegistries.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.client; + +import java.io.File; +import java.io.IOException; +import java.net.URI; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.ipc.RpcClient; +import org.apache.hadoop.hbase.testclassification.ClientTests; +import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.apache.hadoop.minikdc.MiniKdc; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; + +/** + * Test basic read write operation with different {@link ConnectionRegistry} implementations when + * security is enabled. + */ +@Tag(MediumTests.TAG) +@Tag(ClientTests.TAG) +public class TestSecureBasicReadWriteWithDifferentConnectionRegistries + extends BasicReadWriteWithDifferentConnectionRegistriesTestBase { + + protected static final File KEYTAB_FILE = + new File(UTIL.getDataTestDir("keytab").toUri().getPath()); + + private static MiniKdc KDC; + private static String HOST = "localhost"; + private static String PRINCIPAL; + private static String HTTP_PRINCIPAL; + + protected static void stopKDC() { + if (KDC != null) { + KDC.stop(); + } + } + + @BeforeAll + public static void setUpBeforeClass() throws Exception { + KDC = UTIL.setupMiniKdc(KEYTAB_FILE); + PRINCIPAL = "hbase/" + HOST; + HTTP_PRINCIPAL = "HTTP/" + HOST; + KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL); + // set a smaller timeout and retry to speed up tests + UTIL.getConfiguration().setInt(RpcClient.SOCKET_TIMEOUT_READ, 2000); + UTIL.getConfiguration().setInt("hbase.security.relogin.maxretries", 1); + UTIL.getConfiguration().setInt("hbase.security.relogin.maxbackoff", 100); + UTIL.startSecureMiniCluster(KDC, PRINCIPAL, HTTP_PRINCIPAL); + } + + @AfterAll + public static void tearDownAfterClass() throws Exception { + UTIL.shutdownMiniCluster(); + stopKDC(); + } + // for connecting to secure hbase cluster, we need to get some information from Configuration, so + // here we need to use 
UTIL.getConfiguration to get the security related information + @Override + protected Configuration getConf() { + return new Configuration(UTIL.getConfiguration()); + } + + @Override + protected Connection createConn(URI uri) throws IOException { + return ConnectionFactory.createConnection(uri, UTIL.getConfiguration()); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java index 5f37416e6569..4acd1e836407 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java @@ -23,11 +23,11 @@ import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.getPrincipalForTesting; import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.loginKerberosPrincipal; import static org.apache.hadoop.hbase.security.HBaseKerberosUtils.setSecuredConfiguration; -import static org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProviders.SELECTOR_KEY; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThrows; @@ -42,8 +42,6 @@ import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Collections; -import java.util.Map; -import javax.security.sasl.SaslClient; import javax.security.sasl.SaslException; import org.apache.commons.lang3.RandomStringUtils; import org.apache.hadoop.conf.Configuration; @@ -56,17 +54,10 @@ import org.apache.hadoop.hbase.ipc.RpcClientFactory; import org.apache.hadoop.hbase.ipc.RpcServer; import 
org.apache.hadoop.hbase.ipc.RpcServerFactory; -import org.apache.hadoop.hbase.security.provider.AuthenticationProviderSelector; -import org.apache.hadoop.hbase.security.provider.BuiltInProviderSelector; -import org.apache.hadoop.hbase.security.provider.SaslAuthMethod; -import org.apache.hadoop.hbase.security.provider.SaslClientAuthenticationProvider; -import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.minikdc.MiniKdc; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; -import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.Test; +import org.junit.jupiter.api.TestTemplate; import org.mockito.Mockito; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; @@ -74,7 +65,6 @@ import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos; import org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface; -import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation; public class AbstractTestSecureIPC { @@ -120,7 +110,7 @@ protected final void setUpPrincipalAndConf() throws Exception { setSecuredConfiguration(serverConf); } - @Test + @TestTemplate public void testRpcCallWithEnabledKerberosSaslAuth() throws Exception { UserGroupInformation ugi2 = UserGroupInformation.getCurrentUser(); @@ -132,8 +122,16 @@ public void testRpcCallWithEnabledKerberosSaslAuth() throws Exception { callRpcService(User.create(ugi2)); } - @Test - public void testRpcCallWithEnabledKerberosSaslAuthCanonicalHostname() throws Exception { + private static void setCanonicalHostName(InetAddress addr, String canonicalHostName) + throws Exception { + final Field field = InetAddress.class.getDeclaredField("canonicalHostName"); + field.setAccessible(true); + field.set(addr, canonicalHostName); + + } + + @TestTemplate + public void 
testRpcCallWithKerberosSaslAuthCanonicalHostname() throws Exception { UserGroupInformation ugi2 = UserGroupInformation.getCurrentUser(); // check that the login user is okay: @@ -141,109 +139,33 @@ public void testRpcCallWithEnabledKerberosSaslAuthCanonicalHostname() throws Exc assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod()); assertEquals(krbPrincipal, ugi.getUserName()); - enableCanonicalHostnameTesting(clientConf, "localhost"); clientConf.setBoolean( SecurityConstants.UNSAFE_HBASE_CLIENT_KERBEROS_HOSTNAME_DISABLE_REVERSEDNS, false); clientConf.set(HBaseKerberosUtils.KRB_PRINCIPAL, "hbase/_HOST@" + KDC.getRealm()); - callRpcService(User.create(ugi2)); - } - - @Test - public void testRpcCallWithEnabledKerberosSaslAuthNoCanonicalHostname() throws Exception { - UserGroupInformation ugi2 = UserGroupInformation.getCurrentUser(); - - // check that the login user is okay: - assertSame(ugi2, ugi); - assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod()); - assertEquals(krbPrincipal, ugi.getUserName()); + // The InetAddress for localhost is always the same, so here we just modify it to simulate + // hostname mismatch + InetAddress addr = InetAddress.getByName(HOST); + String originalCanonicalHostname = addr.getCanonicalHostName(); + assertNotEquals("127.0.0.1", originalCanonicalHostname); + setCanonicalHostName(addr, "127.0.0.1"); + // should fail because of canonical hostname does not match the principal name + assertThrows(Exception.class, () -> callRpcService(User.create(ugi2))); - enableCanonicalHostnameTesting(clientConf, "127.0.0.1"); clientConf .setBoolean(SecurityConstants.UNSAFE_HBASE_CLIENT_KERBEROS_HOSTNAME_DISABLE_REVERSEDNS, true); - clientConf.set(HBaseKerberosUtils.KRB_PRINCIPAL, "hbase/_HOST@" + KDC.getRealm()); - + // should pass since we do not use canonical hostname callRpcService(User.create(ugi2)); - } - - private static void enableCanonicalHostnameTesting(Configuration conf, String canonicalHostname) { - 
conf.setClass(SELECTOR_KEY, CanonicalHostnameTestingAuthenticationProviderSelector.class, - AuthenticationProviderSelector.class); - conf.set(CanonicalHostnameTestingAuthenticationProviderSelector.CANONICAL_HOST_NAME_KEY, - canonicalHostname); - } - - public static class CanonicalHostnameTestingAuthenticationProviderSelector - extends BuiltInProviderSelector { - private static final String CANONICAL_HOST_NAME_KEY = - "CanonicalHostnameTestingAuthenticationProviderSelector.canonicalHostName"; - - @Override - public Pair> - selectProvider(String clusterId, User user) { - final Pair> pair = - super.selectProvider(clusterId, user); - pair.setFirst(createCanonicalHostNameTestingProvider(pair.getFirst())); - return pair; - } - - SaslClientAuthenticationProvider - createCanonicalHostNameTestingProvider(SaslClientAuthenticationProvider delegate) { - return new SaslClientAuthenticationProvider() { - @Override - public SaslClient createClient(Configuration conf, InetAddress serverAddr, - String serverPrincipal, Token token, boolean fallbackAllowed, - Map saslProps) throws IOException { - final String s = conf.get(CANONICAL_HOST_NAME_KEY); - if (s != null) { - try { - final Field canonicalHostName = - InetAddress.class.getDeclaredField("canonicalHostName"); - canonicalHostName.setAccessible(true); - canonicalHostName.set(serverAddr, s); - } catch (NoSuchFieldException | IllegalAccessException e) { - throw new RuntimeException(e); - } - } - - return delegate.createClient(conf, serverAddr, serverPrincipal, token, fallbackAllowed, - saslProps); - } - - @Override - public UserInformation getUserInfo(User user) { - return delegate.getUserInfo(user); - } - - @Override - public UserGroupInformation getRealUser(User ugi) { - return delegate.getRealUser(ugi); - } - - @Override - public boolean canRetry() { - return delegate.canRetry(); - } - @Override - public void relogin() throws IOException { - delegate.relogin(); - } - - @Override - public SaslAuthMethod getSaslAuthMethod() { - 
return delegate.getSaslAuthMethod(); - } - - @Override - public String getTokenKind() { - return delegate.getTokenKind(); - } - }; - } + clientConf.setBoolean( + SecurityConstants.UNSAFE_HBASE_CLIENT_KERBEROS_HOSTNAME_DISABLE_REVERSEDNS, false); + setCanonicalHostName(addr, originalCanonicalHostname); + // should pass since we set the canonical hostname back, which should be same with the one in + // the principal name + callRpcService(User.create(ugi2)); } - @Test + @TestTemplate public void testRpcServerFallbackToSimpleAuth() throws Exception { String clientUsername = "testuser"; UserGroupInformation clientUgi = @@ -259,7 +181,7 @@ public void testRpcServerFallbackToSimpleAuth() throws Exception { callRpcService(User.create(clientUgi)); } - @Test + @TestTemplate public void testRpcServerDisallowFallbackToSimpleAuth() throws Exception { String clientUsername = "testuser"; UserGroupInformation clientUgi = @@ -281,7 +203,7 @@ public void testRpcServerDisallowFallbackToSimpleAuth() throws Exception { } } - @Test + @TestTemplate public void testRpcClientFallbackToSimpleAuth() throws Exception { String serverUsername = "testuser"; UserGroupInformation serverUgi = @@ -296,7 +218,7 @@ public void testRpcClientFallbackToSimpleAuth() throws Exception { callRpcService(User.create(serverUgi), User.create(ugi)); } - @Test + @TestTemplate public void testRpcClientDisallowFallbackToSimpleAuth() throws Exception { String serverUsername = "testuser"; UserGroupInformation serverUgi = @@ -320,7 +242,7 @@ private void setRpcProtection(String clientProtection, String serverProtection) /** * Test various combinations of Server and Client qops. 
*/ - @Test + @TestTemplate public void testSaslWithCommonQop() throws Exception { setRpcProtection("privacy,authentication", "authentication"); callRpcService(); @@ -338,7 +260,7 @@ public void testSaslWithCommonQop() throws Exception { callRpcService(); } - @Test + @TestTemplate public void testSaslNoCommonQop() throws Exception { setRpcProtection("integrity", "privacy"); SaslException se = assertThrows(SaslException.class, () -> callRpcService()); @@ -348,7 +270,7 @@ public void testSaslNoCommonQop() throws Exception { /** * Test sasl encryption with Crypto AES. */ - @Test + @TestTemplate public void testSaslWithCryptoAES() throws Exception { setRpcProtection("privacy", "privacy"); setCryptoAES("true", "true"); @@ -358,7 +280,7 @@ public void testSaslWithCryptoAES() throws Exception { /** * Test various combinations of Server and Client configuration for Crypto AES. */ - @Test + @TestTemplate public void testDifferentConfWithCryptoAES() throws Exception { setRpcProtection("privacy", "privacy"); @@ -446,7 +368,7 @@ public void run() { try { int[] messageSize = new int[] { 100, 1000, 10000 }; for (int i = 0; i < messageSize.length; i++) { - String input = RandomStringUtils.random(messageSize[i]); + String input = RandomStringUtils.insecure().next(messageSize[i]); String result = stub.echo(null, TestProtos.EchoRequestProto.newBuilder().setMessage(input).build()) .getMessage(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPC.java index 1120d56fb9fd..10863dbcb19a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSaslTlsIPC.java @@ -21,9 +21,10 @@ import java.security.Security; import java.util.ArrayList; import java.util.List; +import java.util.stream.Stream; import org.apache.commons.io.FileUtils; import 
org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.io.crypto.tls.KeyStoreFileType; import org.apache.hadoop.hbase.io.crypto.tls.X509KeyType; import org.apache.hadoop.hbase.io.crypto.tls.X509TestContext; @@ -38,63 +39,60 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.bouncycastle.jce.provider.BouncyCastleProvider; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.provider.Arguments; -@RunWith(Parameterized.class) -@Category({ SecurityTests.class, LargeTests.class }) +@Tag(SecurityTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: caKeyType={0}, certKeyType={1}, keyPassword={2}," + + " acceptPlainText={3}, clientTlsEnabled={4}") public class TestSaslTlsIPC extends AbstractTestSecureIPC { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSaslTlsIPC.class); - private static X509TestContextProvider PROVIDER; - @Parameterized.Parameter(0) - public X509KeyType caKeyType; + private X509KeyType caKeyType; + + private X509KeyType certKeyType; - @Parameterized.Parameter(1) - public X509KeyType certKeyType; + private char[] keyPassword; - @Parameterized.Parameter(2) - public char[] keyPassword; + private boolean acceptPlainText; - @Parameterized.Parameter(3) - public boolean acceptPlainText; + private boolean clientTlsEnabled; - 
@Parameterized.Parameter(4) - public boolean clientTlsEnabled; + public TestSaslTlsIPC(X509KeyType caKeyType, X509KeyType certKeyType, char[] keyPassword, + boolean acceptPlainText, boolean clientTlsEnabled) { + this.caKeyType = caKeyType; + this.certKeyType = certKeyType; + this.keyPassword = keyPassword; + this.acceptPlainText = acceptPlainText; + this.clientTlsEnabled = clientTlsEnabled; + } private X509TestContext x509TestContext; - @Parameterized.Parameters( - name = "{index}: caKeyType={0}, certKeyType={1}, keyPassword={2}, acceptPlainText={3}," - + " clientTlsEnabled={4}") - public static List data() { - List params = new ArrayList<>(); + public static Stream parameters() { + List params = new ArrayList<>(); for (X509KeyType caKeyType : X509KeyType.values()) { for (X509KeyType certKeyType : X509KeyType.values()) { for (char[] keyPassword : new char[][] { "".toCharArray(), "pa$$w0rd".toCharArray() }) { // do not accept plain text - params.add(new Object[] { caKeyType, certKeyType, keyPassword, false, true }); + params.add(Arguments.of(caKeyType, certKeyType, keyPassword, false, true)); // support plain text and client enables tls - params.add(new Object[] { caKeyType, certKeyType, keyPassword, true, true }); + params.add(Arguments.of(caKeyType, certKeyType, keyPassword, true, true)); // support plain text and client disables tls - params.add(new Object[] { caKeyType, certKeyType, keyPassword, true, false }); + params.add(Arguments.of(caKeyType, certKeyType, keyPassword, true, false)); } } } - return params; + return params.stream(); } - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Security.addProvider(new BouncyCastleProvider()); File dir = new File(TEST_UTIL.getDataTestDir(TestSaslTlsIPC.class.getSimpleName()).toString()) @@ -112,14 +110,14 @@ public static void setUpBeforeClass() throws Exception { PROVIDER = new X509TestContextProvider(conf, dir); } - @AfterClass + @AfterAll public static void tearDownAfterClass() 
throws InterruptedException { stopKDC(); Security.removeProvider(BouncyCastleProvider.PROVIDER_NAME); TEST_UTIL.cleanupTestDir(); } - @Before + @BeforeEach public void setUp() throws Exception { x509TestContext = PROVIDER.get(caKeyType, certKeyType, keyPassword); x509TestContext.setConfigurations(KeyStoreFileType.JKS, KeyStoreFileType.JKS); @@ -129,7 +127,7 @@ public void setUp() throws Exception { setUpPrincipalAndConf(); } - @After + @AfterEach public void tearDown() { x509TestContext.clearConfigurations(); x509TestContext.getConf().unset(X509Util.TLS_CONFIG_OCSP); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java index 991c3a936cbd..346a77d66b9d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java @@ -19,9 +19,9 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.List; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import java.util.stream.Stream; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.ipc.BlockingRpcClient; import org.apache.hadoop.hbase.ipc.NettyRpcClient; import org.apache.hadoop.hbase.ipc.NettyRpcServer; @@ -30,57 +30,52 @@ import org.apache.hadoop.hbase.ipc.SimpleRpcServer; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameter; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.AfterAll; +import 
org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.provider.Arguments; -@RunWith(Parameterized.class) -@Category({ SecurityTests.class, LargeTests.class }) +@Tag(SecurityTests.TAG) +@Tag(LargeTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: rpcClientImpl={0}, rpcServerImpl={1}") public class TestSecureIPC extends AbstractTestSecureIPC { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSecureIPC.class); - - @Parameters(name = "{index}: rpcClientImpl={0}, rpcServerImpl={1}") - public static Collection parameters() { - List params = new ArrayList<>(); + public static Stream parameters() { + List params = new ArrayList<>(); List rpcClientImpls = Arrays.asList(BlockingRpcClient.class.getName(), NettyRpcClient.class.getName()); List rpcServerImpls = Arrays.asList(SimpleRpcServer.class.getName(), NettyRpcServer.class.getName()); for (String rpcClientImpl : rpcClientImpls) { for (String rpcServerImpl : rpcServerImpls) { - params.add(new Object[] { rpcClientImpl, rpcServerImpl }); + params.add(Arguments.of(rpcClientImpl, rpcServerImpl)); } } - return params; + return params.stream(); } - @Parameter(0) public String rpcClientImpl; - @Parameter(1) public String rpcServerImpl; - @BeforeClass + public TestSecureIPC(String rpcClientImpl, String rpcServerImpl) { + this.rpcClientImpl = rpcClientImpl; + this.rpcServerImpl = rpcServerImpl; + } + + @BeforeAll public static void setUp() throws Exception { initKDCAndConf(); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { stopKDC(); TEST_UTIL.cleanupTestDir(); } - @Before + @BeforeEach public void setUpTest() throws Exception { setUpPrincipalAndConf(); clientConf.set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY, rpcClientImpl); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslServerAuthenticationProviders.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslServerAuthenticationProviders.java index 7a55aac74fbe..ac557f557909 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslServerAuthenticationProviders.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/provider/TestSaslServerAuthenticationProviders.java @@ -17,16 +17,15 @@ */ package org.apache.hadoop.hbase.security.provider; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.HashMap; import java.util.Map; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -34,24 +33,13 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ SmallTests.class, SecurityTests.class }) +@Tag(SmallTests.TAG) +@Tag(SecurityTests.TAG) public class TestSaslServerAuthenticationProviders { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSaslServerAuthenticationProviders.class); - - @Before - public void reset() { - // Clear out 
any potentially bogus state from the providers class - SaslServerAuthenticationProviders.reset(); - } - @Test public void testCannotAddTheSameProviderTwice() { HashMap registeredProviders = new HashMap<>(); @@ -61,30 +49,11 @@ public void testCannotAddTheSameProviderTwice() { SaslServerAuthenticationProviders.addProviderIfNotExists(p1, registeredProviders); assertEquals(1, registeredProviders.size()); - try { - SaslServerAuthenticationProviders.addProviderIfNotExists(p2, registeredProviders); - } catch (RuntimeException e) { - } - - assertSame("Expected the original provider to be present", p1, - registeredProviders.entrySet().iterator().next().getValue()); - } + assertThrows(RuntimeException.class, + () -> SaslServerAuthenticationProviders.addProviderIfNotExists(p2, registeredProviders)); - @Test - public void testInstanceIsCached() { - Configuration conf = HBaseConfiguration.create(); - SaslServerAuthenticationProviders providers1 = - SaslServerAuthenticationProviders.getInstance(conf); - SaslServerAuthenticationProviders providers2 = - SaslServerAuthenticationProviders.getInstance(conf); - assertSame(providers1, providers2); - - SaslServerAuthenticationProviders.reset(); - - SaslServerAuthenticationProviders providers3 = - SaslServerAuthenticationProviders.getInstance(conf); - assertNotSame(providers1, providers3); - assertEquals(providers1.getNumRegisteredProviders(), providers3.getNumRegisteredProviders()); + assertSame(p1, registeredProviders.entrySet().iterator().next().getValue(), + "Expected the original provider to be present"); } @Test @@ -93,15 +62,14 @@ public void instancesAreInitialized() { conf.set(SaslServerAuthenticationProviders.EXTRA_PROVIDERS_KEY, InitCheckingSaslServerAuthenticationProvider.class.getName()); - SaslServerAuthenticationProviders providers = - SaslServerAuthenticationProviders.getInstance(conf); + SaslServerAuthenticationProviders providers = new SaslServerAuthenticationProviders(conf); SaslServerAuthenticationProvider provider = 
providers.selectProvider(InitCheckingSaslServerAuthenticationProvider.ID); assertEquals(InitCheckingSaslServerAuthenticationProvider.class, provider.getClass()); - assertTrue("Provider was not inititalized", - ((InitCheckingSaslServerAuthenticationProvider) provider).isInitialized()); + assertTrue(((InitCheckingSaslServerAuthenticationProvider) provider).isInitialized(), + "Provider was not initialized"); } public static class InitCheckingSaslServerAuthenticationProvider