diff --git a/hadoop-cloud-storage-project/hadoop-cos/src/main/java/org/apache/hadoop/fs/cosn/CosNativeFileSystemStore.java b/hadoop-cloud-storage-project/hadoop-cos/src/main/java/org/apache/hadoop/fs/cosn/CosNativeFileSystemStore.java
index d2484c0e47b3c..1cf139d06c578 100644
--- a/hadoop-cloud-storage-project/hadoop-cos/src/main/java/org/apache/hadoop/fs/cosn/CosNativeFileSystemStore.java
+++ b/hadoop-cloud-storage-project/hadoop-cos/src/main/java/org/apache/hadoop/fs/cosn/CosNativeFileSystemStore.java
@@ -58,7 +58,7 @@
 import com.qcloud.cos.model.UploadPartRequest;
 import com.qcloud.cos.model.UploadPartResult;
 import com.qcloud.cos.region.Region;
-import com.qcloud.cos.utils.Base64;
+import java.util.Base64;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -168,7 +168,7 @@ private void storeFileWithRetry(String key, InputStream inputStream,
       byte[] md5Hash, long length) throws IOException {
     try {
       ObjectMetadata objectMetadata = new ObjectMetadata();
-      objectMetadata.setContentMD5(Base64.encodeAsString(md5Hash));
+      objectMetadata.setContentMD5(Base64.getEncoder().encodeToString(md5Hash));
       objectMetadata.setContentLength(length);
       PutObjectRequest putObjectRequest =
           new PutObjectRequest(bucketName, key, inputStream, objectMetadata);
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
index 64d43307ffc2d..5779f4f9fbdee 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
@@ -15,7 +15,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import java.lang.reflect.Constructor;
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.security.authentication.server.HttpConstants;
 import org.apache.hadoop.security.authentication.util.AuthToken;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
@@ -149,7 +149,6 @@ public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
   }
 
   private URL url;
-  private Base64 base64;
   private ConnectionConfigurator connConfigurator;
 
   /**
@@ -182,7 +181,6 @@ public void authenticate(URL url, AuthenticatedURL.Token token)
       throws IOException, AuthenticationException {
     if (!token.isSet()) {
       this.url = url;
-      base64 = new Base64(0);
       try {
         HttpURLConnection conn = token.openConnection(url, connConfigurator);
         conn.setRequestMethod(AUTH_HTTP_METHOD);
@@ -369,7 +367,7 @@ public Void run() throws Exception {
    */
   private void sendToken(HttpURLConnection conn, byte[] outToken)
       throws IOException {
-    String token = base64.encodeToString(outToken);
+    String token = Base64.getEncoder().encodeToString(outToken);
     conn.setRequestMethod(AUTH_HTTP_METHOD);
     conn.setRequestProperty(AUTHORIZATION, NEGOTIATE + " " + token);
     conn.connect();
@@ -388,7 +386,7 @@ private byte[] readToken(HttpURLConnection conn)
             "' header incorrect: " + authHeader);
       }
       String negotiation = authHeader.trim().substring((NEGOTIATE + " ").length()).trim();
-      return base64.decode(negotiation);
+      return Base64.getDecoder().decode(negotiation);
     }
     throw new AuthenticationException("Invalid SPNEGO sequence, status code: " + status);
   }
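Note on the hunks above: the `new Base64(0)` instances being removed configure commons-codec with a zero line length, i.e. no MIME-style chunking, so the plain `java.util.Base64` encoder is a drop-in match. A minimal round-trip sketch of that equivalence (class name and sample bytes are illustrative only):

```java
import java.util.Arrays;
import java.util.Base64;

public class BasicBase64Equivalence {
  public static void main(String[] args) {
    byte[] token = {0, 1, 2, (byte) 0xfe};
    // java.util.Base64's basic encoder emits one unchunked line, matching
    // commons-codec's `new Base64(0)` (line length 0 disables wrapping).
    String encoded = Base64.getEncoder().encodeToString(token); // "AAEC/g=="
    byte[] roundTrip = Base64.getDecoder().decode(encoded);
    System.out.println(encoded + " " + Arrays.equals(token, roundTrip));
  }
}
```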
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
index 50eeb2a965e27..41d30f7b72a22 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
@@ -16,7 +16,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSException;
@@ -324,8 +324,7 @@ public AuthenticationToken authenticate(HttpServletRequest request,
     } else {
       authorization = authorization.substring(
           KerberosAuthenticator.NEGOTIATE.length()).trim();
-      final Base64 base64 = new Base64(0);
-      final byte[] clientToken = base64.decode(authorization);
+      final byte[] clientToken = Base64.getDecoder().decode(authorization);
       try {
         final String serverPrincipal =
             KerberosUtil.getTokenServerName(clientToken);
@@ -338,8 +337,7 @@ public AuthenticationToken authenticate(HttpServletRequest request,
             new PrivilegedExceptionAction<AuthenticationToken>() {
               @Override
               public AuthenticationToken run() throws Exception {
-                return runWithPrincipal(serverPrincipal, clientToken,
-                    base64, response);
+                return runWithPrincipal(serverPrincipal, clientToken, response);
               }
             });
       } catch (PrivilegedActionException ex) {
@@ -356,7 +354,7 @@ public AuthenticationToken run() throws Exception {
   }
 
   private AuthenticationToken runWithPrincipal(String serverPrincipal,
-      byte[] clientToken, Base64 base64, HttpServletResponse response) throws
+      byte[] clientToken, HttpServletResponse response) throws
       IOException, GSSException {
     GSSContext gssContext = null;
     GSSCredential gssCreds = null;
@@ -375,7 +373,7 @@ private AuthenticationToken runWithPrincipal(String serverPrincipal,
     byte[] serverToken = gssContext.acceptSecContext(clientToken, 0,
         clientToken.length);
     if (serverToken != null && serverToken.length > 0) {
-      String authenticate = base64.encodeToString(serverToken);
+      String authenticate = Base64.getEncoder().encodeToString(serverToken);
       response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE,
           KerberosAuthenticator.NEGOTIATE + " " + authenticate);
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/LdapAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/LdapAuthenticationHandler.java
index 8cc8d03447a99..2922bf805d5ab 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/LdapAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/LdapAuthenticationHandler.java
@@ -31,7 +31,7 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
@@ -190,9 +190,8 @@ public AuthenticationToken authenticate(HttpServletRequest request,
     } else {
       authorization =
           authorization.substring(HttpConstants.BASIC.length()).trim();
-      final Base64 base64 = new Base64(0);
       // As per RFC7617, UTF-8 charset should be used for decoding.
-      String[] credentials = new String(base64.decode(authorization),
+      String[] credentials = new String(Base64.getDecoder().decode(authorization),
           StandardCharsets.UTF_8).split(":", 2);
       if (credentials.length == 2) {
         token = authenticateUser(credentials[0], credentials[1]);
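For the Basic scheme handled above, the decode side pairs with a standard (non-URL-safe) encode on the client. A hedged sketch of the header round-trip, reusing the same illustrative credentials the tests below use (class name is hypothetical):

```java
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class BasicAuthHeaderSketch {
  public static void main(String[] args) {
    String header = "Basic " + Base64.getEncoder()
        .encodeToString("bjones:p@ssw0rd".getBytes(StandardCharsets.UTF_8));
    String payload = header.substring("Basic ".length()).trim();
    // As per RFC 7617 the decoded bytes are interpreted as UTF-8, and the
    // split is limited to 2 so passwords may themselves contain ':'.
    String[] credentials = new String(Base64.getDecoder().decode(payload),
        StandardCharsets.UTF_8).split(":", 2);
    System.out.println(credentials[0] + " / " + credentials[1]); // bjones / p@ssw0rd
  }
}
```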
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java
index e7b19a494fcc9..7f5cfe4b2cf36 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java
@@ -13,7 +13,7 @@
  */
 package org.apache.hadoop.security.authentication.util;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.commons.codec.binary.StringUtils;
 
 import javax.crypto.Mac;
@@ -94,7 +94,7 @@ protected String computeSignature(byte[] secret, String str) {
       Mac mac = Mac.getInstance(SIGNING_ALGORITHM);
       mac.init(key);
       byte[] sig = mac.doFinal(StringUtils.getBytesUtf8(str));
-      return new Base64(0).encodeToString(sig);
+      return Base64.getEncoder().encodeToString(sig);
     } catch (NoSuchAlgorithmException | InvalidKeyException ex) {
       throw new RuntimeException("It should not happen, " + ex.getMessage(), ex);
     }
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
index 629b68bffbbd9..0f59f01d12c47 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
@@ -17,7 +17,7 @@
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
@@ -314,8 +314,7 @@ public String call() throws Exception {
         byte[] inToken = new byte[0];
         byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
-        Base64 base64 = new Base64(0);
-        return base64.encodeToString(outToken);
+        return Base64.getEncoder().encodeToString(outToken);
       } finally {
         if (gssContext != null) {
@@ -356,7 +355,7 @@ public String call() throws Exception {
 
   @Test
   public void testRequestWithInvalidKerberosAuthorization() {
-    String token = new Base64(0).encodeToString(new byte[]{0, 1, 2});
+    String token = Base64.getEncoder().encodeToString(new byte[]{0, 1, 2});
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
@@ -376,7 +375,7 @@ public void testRequestWithInvalidKerberosAuthorization() {
 
   @Test
   public void testRequestToWhitelist() throws Exception {
-    final String token = new Base64(0).encodeToString(new byte[]{0, 1, 2});
+    final String token = Base64.getEncoder().encodeToString(new byte[]{0, 1, 2});
     final HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     final HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestLdapAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestLdapAuthenticationHandler.java
index 59aef5a688363..1e312b68d1c68 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestLdapAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestLdapAuthenticationHandler.java
@@ -21,7 +21,7 @@
 import static org.apache.hadoop.security.authentication.server.LdapAuthenticationHandler.*;
 import static org.apache.hadoop.security.authentication.server.LdapConstants.*;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.directory.server.annotations.CreateLdapServer;
 import org.apache.directory.server.annotations.CreateTransport;
 import org.apache.directory.server.core.annotations.ApplyLdifs;
@@ -99,10 +99,9 @@ public void testRequestWithInvalidAuthorization() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
 
-    final Base64 base64 = new Base64(0);
     String credentials = "bjones:invalidpassword";
     Mockito.when(request.getHeader(HttpConstants.AUTHORIZATION_HEADER))
-        .thenReturn(base64.encodeToString(credentials.getBytes()));
+        .thenReturn(Base64.getEncoder().encodeToString(credentials.getBytes()));
     Assert.assertNull(handler.authenticate(request, response));
     Mockito.verify(response).setHeader(WWW_AUTHENTICATE, HttpConstants.BASIC);
     Mockito.verify(response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
@@ -123,8 +122,7 @@ public void testRequestWithAuthorization() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
 
-    final Base64 base64 = new Base64(0);
-    String credentials = base64.encodeToString("bjones:p@ssw0rd".getBytes());
+    String credentials = Base64.getEncoder().encodeToString("bjones:p@ssw0rd".getBytes());
     String authHeader = HttpConstants.BASIC + " " + credentials;
     Mockito.when(request.getHeader(HttpConstants.AUTHORIZATION_HEADER))
         .thenReturn(authHeader);
@@ -141,8 +139,7 @@ public void testRequestWithWrongCredentials() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
 
-    final Base64 base64 = new Base64(0);
-    String credentials = base64.encodeToString("bjones:foo123".getBytes());
+    String credentials = Base64.getEncoder().encodeToString("bjones:foo123".getBytes());
     String authHeader = HttpConstants.BASIC + " " + credentials;
     Mockito.when(request.getHeader(HttpConstants.AUTHORIZATION_HEADER))
         .thenReturn(authHeader);
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestMultiSchemeAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestMultiSchemeAuthenticationHandler.java
index 735cb43ef3058..0e691b31e3887 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestMultiSchemeAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestMultiSchemeAuthenticationHandler.java
@@ -30,7 +30,7 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.directory.server.annotations.CreateLdapServer;
 import org.apache.directory.server.annotations.CreateTransport;
 import org.apache.directory.server.core.annotations.ApplyLdifs;
@@ -138,10 +138,9 @@ public void testRequestWithInvalidAuthorization() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
 
-    final Base64 base64 = new Base64(0);
     String credentials = "bjones:invalidpassword";
     Mockito.when(request.getHeader(AUTHORIZATION_HEADER))
-        .thenReturn(base64.encodeToString(credentials.getBytes()));
+        .thenReturn(Base64.getEncoder().encodeToString(credentials.getBytes()));
     Assert.assertNull(handler.authenticate(request, response));
     Mockito.verify(response).addHeader(WWW_AUTHENTICATE_HEADER, BASIC);
     Mockito.verify(response).addHeader(WWW_AUTHENTICATE_HEADER, NEGOTIATE);
@@ -153,8 +152,7 @@ public void testRequestWithLdapAuthorization() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
 
-    final Base64 base64 = new Base64(0);
-    String credentials = base64.encodeToString("bjones:p@ssw0rd".getBytes());
+    String credentials = Base64.getEncoder().encodeToString("bjones:p@ssw0rd".getBytes());
     String authHeader = BASIC + " " + credentials;
     Mockito.when(request.getHeader(AUTHORIZATION_HEADER))
         .thenReturn(authHeader);
@@ -168,7 +166,7 @@ public void testRequestWithLdapAuthorization() throws Exception {
 
   @Test(timeout = 60000)
   public void testRequestWithInvalidKerberosAuthorization() throws Exception {
-    String token = new Base64(0).encodeToString(new byte[]{0, 1, 2});
+    String token = Base64.getEncoder().encodeToString(new byte[]{0, 1, 2});
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index 71ed4557b357b..e9eccd42a4bd3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.crypto.key.kms;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -74,6 +73,7 @@
 import java.util.Map;
 import java.util.Queue;
 import java.util.concurrent.ExecutionException;
+import java.util.Base64;
 
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
@@ -701,7 +701,7 @@ private KeyVersion createKeyInternal(String name, byte[] material,
     jsonKey.put(KMSRESTConstants.LENGTH_FIELD, options.getBitLength());
     if (material != null) {
       jsonKey.put(KMSRESTConstants.MATERIAL_FIELD,
-          Base64.encodeBase64String(material));
+          Base64.getEncoder().encodeToString(material));
     }
     if (options.getDescription() != null) {
       jsonKey.put(KMSRESTConstants.DESCRIPTION_FIELD,
@@ -752,7 +752,7 @@ private KeyVersion rollNewVersionInternal(String name, byte[] material)
     Map<String, Object> jsonMaterial = new HashMap<String, Object>();
     if (material != null) {
       jsonMaterial.put(KMSRESTConstants.MATERIAL_FIELD,
-          Base64.encodeBase64String(material));
+          Base64.getEncoder().encodeToString(material));
     }
     URL url = createURL(KMSRESTConstants.KEY_RESOURCE, name, null, null);
     HttpURLConnection conn = createConnection(url, HTTP_POST);
@@ -816,9 +816,9 @@ public KeyVersion decryptEncryptedKey(
     Map<String, Object> jsonPayload = new HashMap<String, Object>();
     jsonPayload.put(KMSRESTConstants.NAME_FIELD,
         encryptedKeyVersion.getEncryptionKeyName());
-    jsonPayload.put(KMSRESTConstants.IV_FIELD, Base64.encodeBase64String(
+    jsonPayload.put(KMSRESTConstants.IV_FIELD, Base64.getEncoder().encodeToString(
         encryptedKeyVersion.getEncryptedKeyIv()));
-    jsonPayload.put(KMSRESTConstants.MATERIAL_FIELD, Base64.encodeBase64String(
+    jsonPayload.put(KMSRESTConstants.MATERIAL_FIELD, Base64.getEncoder().encodeToString(
        encryptedKeyVersion.getEncryptedKeyVersion().getMaterial()));
     URL url = createURL(KMSRESTConstants.KEY_VERSION_RESOURCE,
         encryptedKeyVersion.getEncryptionKeyVersionName(),
@@ -846,9 +846,9 @@ public EncryptedKeyVersion reencryptEncryptedKey(EncryptedKeyVersion ekv)
     final Map<String, Object> jsonPayload = new HashMap<>();
     jsonPayload.put(KMSRESTConstants.NAME_FIELD, ekv.getEncryptionKeyName());
     jsonPayload.put(KMSRESTConstants.IV_FIELD,
-        Base64.encodeBase64String(ekv.getEncryptedKeyIv()));
+        Base64.getEncoder().encodeToString(ekv.getEncryptedKeyIv()));
     jsonPayload.put(KMSRESTConstants.MATERIAL_FIELD,
-        Base64.encodeBase64String(ekv.getEncryptedKeyVersion().getMaterial()));
+        Base64.getEncoder().encodeToString(ekv.getEncryptedKeyVersion().getMaterial()));
     final URL url = createURL(KMSRESTConstants.KEY_VERSION_RESOURCE,
         ekv.getEncryptionKeyVersionName(),
         KMSRESTConstants.EEK_SUB_RESOURCE, params);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
index e15968dd6d273..c4486d0c8a5bb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
@@ -20,12 +20,12 @@
 import java.io.IOException;
 
 import org.apache.commons.codec.DecoderException;
-import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.codec.binary.Hex;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import com.google.common.base.Preconditions;
+import java.util.Base64;
 
 /**
  * The value of XAttr is byte[], this class is to
@@ -57,8 +57,7 @@
   private static final String HEX_PREFIX = "0x";
   private static final String BASE64_PREFIX = "0s";
-  private static final Base64 base64 = new Base64(0);
-  
+
   /**
    * Decode string representation of a value and check whether it's
    * encoded. If the given string begins with 0x or 0X, it expresses
@@ -87,7 +86,7 @@ public static byte[] decodeValue(String value) throws IOException {
         }
       } else if (en.equalsIgnoreCase(BASE64_PREFIX)) {
         value = value.substring(2, value.length());
-        result = base64.decode(value);
+        result = Base64.getDecoder().decode(value);
       }
     }
     if (result == null) {
@@ -113,7 +112,7 @@ public static String encodeValue(byte[] value, XAttrCodec encoding)
     if (encoding == HEX) {
       return HEX_PREFIX + Hex.encodeHexString(value);
     } else if (encoding == BASE64) {
-      return BASE64_PREFIX + base64.encodeToString(value);
+      return BASE64_PREFIX + Base64.getEncoder().encodeToString(value);
     } else {
       return "\"" + new String(value, "utf-8") + "\"";
     }
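The XAttr codec above keeps its `0s` prefix convention; only the encoder behind it changes. One behavioral difference worth noting: commons-codec's decoder silently skipped characters outside the alphabet, while `java.util`'s throws `IllegalArgumentException`. A hedged sketch of the `0s` round-trip with the new API (prefix constant inlined for illustration):

```java
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class XAttrBase64Sketch {
  private static final String BASE64_PREFIX = "0s"; // mirrors XAttrCodec

  public static void main(String[] args) {
    byte[] value = "some xattr value".getBytes(StandardCharsets.UTF_8);
    // Encode: prefix + standard base64, as XAttrCodec.encodeValue now does.
    String encoded = BASE64_PREFIX + Base64.getEncoder().encodeToString(value);
    // Decode: strip the two-character prefix before handing off to the decoder.
    byte[] decoded = Base64.getDecoder().decode(encoded.substring(2));
    System.out.println(new String(decoded, StandardCharsets.UTF_8));
  }
}
```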
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
index 7453996ecab1c..3f44833fc8b05 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
@@ -23,7 +23,7 @@
 import java.nio.charset.UnsupportedCharsetException;
 import java.util.ArrayList;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -76,7 +76,7 @@ public DefaultStringifier(Configuration conf, Class<T> c) {
   @Override
   public T fromString(String str) throws IOException {
     try {
-      byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8"));
+      byte[] bytes = Base64.getDecoder().decode(str.getBytes("UTF-8"));
       inBuf.reset(bytes, bytes.length);
       T restored = deserializer.deserialize(null);
       return restored;
@@ -91,7 +91,7 @@ public String toString(T obj) throws IOException {
     serializer.serialize(obj);
     byte[] buf = new byte[outBuf.getLength()];
     System.arraycopy(outBuf.getData(), 0, buf, 0, buf.length);
-    return new String(Base64.encodeBase64(buf), StandardCharsets.UTF_8);
+    return new String(Base64.getEncoder().encode(buf), StandardCharsets.UTF_8);
   }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
index 7c3f14da21cf5..22e9b9a8cb159 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
@@ -39,7 +39,7 @@
 import javax.security.sasl.SaslServer;
 import javax.security.sasl.SaslServerFactory;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -182,11 +182,11 @@ public static void init(Configuration conf) {
   }
 
   static String encodeIdentifier(byte[] identifier) {
-    return new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8);
+    return new String(Base64.getEncoder().encode(identifier), StandardCharsets.UTF_8);
   }
 
   static byte[] decodeIdentifier(String identifier) {
-    return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
+    return Base64.getDecoder().decode(identifier.getBytes(StandardCharsets.UTF_8));
   }
 
   public static <T extends TokenIdentifier> T getIdentifier(String id,
@@ -204,7 +204,7 @@ public static <T extends TokenIdentifier> T getIdentifier(String id,
   }
 
   static char[] encodePassword(byte[] password) {
-    return new String(Base64.encodeBase64(password),
+    return new String(Base64.getEncoder().encode(password),
         StandardCharsets.UTF_8).toCharArray();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
index 4f0f6fc4d444a..cad9461fbe1e7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
@@ -21,7 +21,7 @@
 import com.google.common.collect.Maps;
 import com.google.common.primitives.Bytes;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -339,10 +339,9 @@ public void write(DataOutput out) throws IOException {
   private static String encodeWritable(Writable obj) throws IOException {
     DataOutputBuffer buf = new DataOutputBuffer();
     obj.write(buf);
-    Base64 encoder = new Base64(0, null, true);
     byte[] raw = new byte[buf.getLength()];
     System.arraycopy(buf.getData(), 0, raw, 0, buf.getLength());
-    return encoder.encodeToString(raw);
+    return Base64.getUrlEncoder().withoutPadding().encodeToString(raw);
   }
 
   /**
@@ -357,9 +356,8 @@ private static void decodeWritable(Writable obj,
     if (newValue == null) {
       throw new HadoopIllegalArgumentException(
           "Invalid argument, newValue is null");
     }
-    Base64 decoder = new Base64(0, null, true);
     DataInputBuffer buf = new DataInputBuffer();
-    byte[] decoded = decoder.decode(newValue);
+    byte[] decoded = Base64.getUrlDecoder().decode(newValue);
     buf.reset(decoded, decoded.length);
     obj.readFields(buf);
   }
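Note on `Token.java`: commons-codec's `new Base64(0, null, true)` produces URL-safe output *without* `=` padding, whereas `java.util.Base64.getUrlEncoder()` pads by default; the hunk above therefore chains `withoutPadding()` so encoded token strings keep their old wire shape. A small demonstration of the difference (byte values are arbitrary):

```java
import java.util.Arrays;
import java.util.Base64;

public class UrlSafeTokenEncoding {
  public static void main(String[] args) {
    byte[] raw = {(byte) 0xfb, (byte) 0xf0};
    // commons-codec's URL-safe mode never appended '=' padding:
    String unpadded = Base64.getUrlEncoder().withoutPadding().encodeToString(raw); // "-_A"
    String padded = Base64.getUrlEncoder().encodeToString(raw);                    // "-_A="
    // The URL decoder accepts both forms, so previously issued tokens still parse.
    System.out.println(unpadded + " / " + padded + " / "
        + Arrays.equals(raw, Base64.getUrlDecoder().decode(unpadded)));
  }
}
```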
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/KMSUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/KMSUtil.java
index 5b48da15556a8..0b8814d1836b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/KMSUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/KMSUtil.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.util;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -35,6 +34,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Base64;
 
 /**
  * Utils for KMS.
@@ -101,7 +101,7 @@ public static Map toJSON(KeyProvider.KeyVersion keyVersion) {
       json.put(KMSRESTConstants.VERSION_NAME_FIELD,
           keyVersion.getVersionName());
       json.put(KMSRESTConstants.MATERIAL_FIELD,
-          Base64.encodeBase64URLSafeString(
+          Base64.getUrlEncoder().withoutPadding().encodeToString(
               keyVersion.getMaterial()));
     }
     return json;
@@ -114,7 +114,7 @@ public static Map toJSON(EncryptedKeyVersion encryptedKeyVersion) {
       json.put(KMSRESTConstants.VERSION_NAME_FIELD,
           encryptedKeyVersion.getEncryptionKeyVersionName());
       json.put(KMSRESTConstants.IV_FIELD, Base64
-          .encodeBase64URLSafeString(encryptedKeyVersion.getEncryptedKeyIv()));
+          .getUrlEncoder().withoutPadding().encodeToString(encryptedKeyVersion.getEncryptedKeyIv()));
       json.put(KMSRESTConstants.ENCRYPTED_KEY_VERSION_FIELD,
           toJSON(encryptedKeyVersion.getEncryptedKeyVersion()));
     }
@@ -162,7 +162,7 @@ public static EncryptedKeyVersion parseJSONEncKeyVersion(String keyName,
         (String) valueMap.get(KMSRESTConstants.VERSION_NAME_FIELD),
         KMSRESTConstants.VERSION_NAME_FIELD);
 
-    byte[] iv = Base64.decodeBase64(checkNotNull(
+    byte[] iv = Base64.getUrlDecoder().decode(checkNotNull(
         (String) valueMap.get(KMSRESTConstants.IV_FIELD),
         KMSRESTConstants.IV_FIELD));
 
@@ -174,7 +174,7 @@ public static EncryptedKeyVersion parseJSONEncKeyVersion(String keyName,
         encValueMap.get(KMSRESTConstants.VERSION_NAME_FIELD),
         KMSRESTConstants.VERSION_NAME_FIELD);
 
-    byte[] encKeyMaterial = Base64.decodeBase64(checkNotNull((String)
+    byte[] encKeyMaterial = Base64.getUrlDecoder().decode(checkNotNull((String)
         encValueMap.get(KMSRESTConstants.MATERIAL_FIELD),
         KMSRESTConstants.MATERIAL_FIELD));
 
@@ -189,7 +189,7 @@ public static KeyProvider.KeyVersion parseJSONKeyVersion(Map valueMap) {
     if (!valueMap.isEmpty()) {
       byte[] material =
          (valueMap.containsKey(KMSRESTConstants.MATERIAL_FIELD)) ?
-              Base64.decodeBase64(
+              Base64.getUrlDecoder().decode(
                   (String) valueMap.get(KMSRESTConstants.MATERIAL_FIELD)) : null;
       String versionName =
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
index 94ff7a88493c7..4ef4f673ec45f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
@@ -48,7 +48,7 @@
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileStatus;
@@ -361,7 +361,7 @@ private static Path writeSplitTestFile(FileSystem fs, Random rand,
         fs.getUri(), fs.getWorkingDirectory());
     final Path file = new Path(wd, "test" + codec.getDefaultExtension());
     final byte[] b = new byte[REC_SIZE];
-    final Base64 b64 = new Base64(0, null);
+    final Base64.Encoder b64 = Base64.getEncoder();
     Compressor cmp = CodecPool.getCompressor(codec);
     try (DataOutputStream fout = new DataOutputStream(codec.createOutputStream(fs.create(file,
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
index b6b425443babc..945f2494881f1 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
@@ -19,7 +19,6 @@
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.util.KMSUtil;
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
@@ -55,6 +54,7 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Base64;
 
 import static org.apache.hadoop.util.KMSUtil.checkNotEmpty;
 import static org.apache.hadoop.util.KMSUtil.checkNotNull;
@@ -151,7 +151,7 @@ public Response createKey(Map jsonKey) throws Exception {
           @Override
           public KeyVersion run() throws Exception {
             KeyProvider.KeyVersion keyVersion = (material != null)
-                ? provider.createKey(name, Base64.decodeBase64(material),
+                ? provider.createKey(name, Base64.getDecoder().decode(material),
                     options)
                 : provider.createKey(name, options);
             provider.flush();
@@ -235,7 +235,7 @@ public Response rolloverKey(@PathParam("name") final String name,
           public KeyVersion run() throws Exception {
             KeyVersion keyVersion = (material != null)
                 ? provider.rollNewVersion(name,
-                    Base64.decodeBase64(material))
+                    Base64.getDecoder().decode(material))
                 : provider.rollNewVersion(name);
             provider.flush();
             return keyVersion;
@@ -625,10 +625,10 @@ public Response handleEncryptedKeyOp(
       String encMaterialStr =
           (String) jsonPayload.get(KMSRESTConstants.MATERIAL_FIELD);
       checkNotNull(ivStr, KMSRESTConstants.IV_FIELD);
-      final byte[] iv = Base64.decodeBase64(ivStr);
+      final byte[] iv = Base64.getDecoder().decode(ivStr);
       checkNotNull(encMaterialStr, KMSRESTConstants.MATERIAL_FIELD);
-      final byte[] encMaterial = Base64.decodeBase64(encMaterialStr);
+      final byte[] encMaterial = Base64.getDecoder().decode(encMaterialStr);
       Object retJSON;
       if (eekOp.equals(KMSRESTConstants.EEK_DECRYPT)) {
         KMSWebApp.getDecryptEEKCallsMeter().mark();
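One pitfall across the KMS hunks: `KMSUtil.toJSON` emits URL-safe base64 (as `encodeBase64URLSafeString` did), but `java.util`'s basic decoder, unlike commons-codec's lenient `decodeBase64`, rejects the `-` and `_` characters that alphabet can produce. That is why the `KMSUtil` parse paths above use `getUrlDecoder()`, while `KMS.java` keeps `getDecoder()` for payloads the client encodes with the standard encoder. A sketch of the failure mode (sample bytes chosen to force URL-safe characters):

```java
import java.util.Base64;

public class AlphabetMismatch {
  public static void main(String[] args) {
    byte[] material = {(byte) 0xfb, (byte) 0xf0, 0x7f};
    String urlSafe = Base64.getUrlEncoder().encodeToString(material); // "-_B_"
    try {
      Base64.getDecoder().decode(urlSafe); // basic decoder: throws
    } catch (IllegalArgumentException expected) {
      System.out.println("basic decoder rejects URL-safe input: " + expected.getMessage());
    }
    byte[] ok = Base64.getUrlDecoder().decode(urlSafe); // matching decoder round-trips
    System.out.println(ok.length == material.length);
  }
}
```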
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
index be63d028f1fd5..0d4800558b1fb 100644
--- a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
@@ -19,7 +19,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.filefilter.IOFileFilter;
-import org.apache.commons.net.util.Base64;
+import java.util.Base64;
 import org.apache.commons.net.util.SubnetUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
@@ -636,7 +636,7 @@ private void enableDNSSECIfNecessary(Zone zone, Configuration conf,
     Name zoneName = zone.getOrigin();
     DNSKEYRecord dnskeyRecord = dnsKeyRecs.get(zoneName);
     if (dnskeyRecord == null) {
-      byte[] key = Base64.decodeBase64(publicKey.getBytes("UTF-8"));
+      byte[] key = Base64.getDecoder().decode(publicKey.getBytes("UTF-8"));
       dnskeyRecord = new DNSKEYRecord(zoneName, DClass.IN, ttl,
           DNSKEYRecord.Flags.ZONE_KEY,
@@ -661,8 +661,8 @@ private void enableDNSSECIfNecessary(Zone zone, Configuration conf,
       String privateExponent = props.getProperty("PrivateExponent");
 
       RSAPrivateKeySpec privateSpec = new RSAPrivateKeySpec(
-          new BigInteger(1, Base64.decodeBase64(privateModulus)),
-          new BigInteger(1, Base64.decodeBase64(privateExponent)));
+          new BigInteger(1, Base64.getDecoder().decode(privateModulus)),
+          new BigInteger(1, Base64.getDecoder().decode(privateExponent)));
 
       KeyFactory factory = KeyFactory.getInstance("RSA");
       privateKey = factory.generatePrivate(privateSpec);
diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
index a0c4ca3970c5c..02e42822d21db 100644
--- a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
+++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
@@ -16,7 +16,7 @@
  */
 package org.apache.hadoop.registry.server.dns;
 
-import org.apache.commons.net.util.Base64;
+import java.util.Base64;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.registry.client.api.RegistryConstants;
 import org.apache.hadoop.registry.client.binding.RegistryUtils;
@@ -445,14 +445,14 @@ public void testDNSKEYRecord() throws Exception {
         DNSKEYRecord.Flags.ZONE_KEY,
         DNSKEYRecord.Protocol.DNSSEC,
         DNSSEC.Algorithm.RSASHA256,
-        Base64.decodeBase64(publicK.getBytes()));
+        Base64.getDecoder().decode(publicK.getBytes()));
     assertNotNull(dnskeyRecord);
     RSAPrivateKeySpec privateSpec = new RSAPrivateKeySpec(new BigInteger(1,
-        Base64.decodeBase64(
+        Base64.getDecoder().decode(
             "7Ul6/QDPWSGVAK9/Se53X8I0dDDA8S7wE1yFm2F0PEo9Wfb3KsMIegBaPCIaw5LDd"
                 + "LMg+trBJsfPImyOfSgsGEasfpB50UafJ2jGM2zDeb9IKY6NH9rssYEAwMUq"
                 + "oWKiLiA7K43rqy8F5j7/m7Dvb7R6L0BDbSCp/qqX07OzltU=")),
-        new BigInteger(1, Base64.decodeBase64(
+        new BigInteger(1, Base64.getDecoder().decode(
            "MgbQ6DBYhskeufNGGdct0cGG/4wb0X183ggenwCv2dopDyOTPq+5xMb4Pz9Ndzgk/"
                + "yCY7mpaWIu9rttGOzrR+LBRR30VobPpMK1bMnzu2C0x08oYAguVwZB79DLC"
                + "705qmZpiaaFB+LnhG7VtpPiOBm3UzZxdrBfeq/qaKrXid60=")));
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
index 8d6e318168b3d..be7cd9ff5d4b7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
@@ -36,7 +36,7 @@
 import java.util.Set;
 import javax.security.sasl.Sasl;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherOption;
@@ -145,7 +145,7 @@ public static Map<String, String> createSaslPropertiesForEncryption(
    * @return key encoded as SASL password
    */
  public static char[] encryptionKeyToPassword(byte[] encryptionKey) {
-    return new String(Base64.encodeBase64(encryptionKey, false), Charsets.UTF_8)
+    return new String(Base64.getEncoder().encode(encryptionKey), Charsets.UTF_8)
        .toCharArray();
  }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
index acd1e505cbbc0..fcf9b698dabfb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
@@ -44,7 +44,7 @@
 import javax.security.sasl.RealmChoiceCallback;
 import javax.security.sasl.Sasl;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherOption;
@@ -346,7 +346,7 @@ private static String getUserNameFromEncryptionKey(
       DataEncryptionKey encryptionKey) {
     return encryptionKey.keyId + NAME_DELIMITER +
         encryptionKey.blockPoolId + NAME_DELIMITER +
-        new String(Base64.encodeBase64(encryptionKey.nonce, false),
+        new String(Base64.getEncoder().encode(encryptionKey.nonce),
            Charsets.UTF_8);
   }
 
@@ -470,7 +470,7 @@ private void updateToken(Token<BlockTokenIdentifier> accessToken,
    * @return SASL user name
    */
   private static String buildUserName(Token<BlockTokenIdentifier> blockToken) {
-    return new String(Base64.encodeBase64(blockToken.getIdentifier(), false),
+    return new String(Base64.getEncoder().encode(blockToken.getIdentifier()),
        Charsets.UTF_8);
   }
@@ -482,7 +482,7 @@ private static String buildUserName(Token<BlockTokenIdentifier> blockToken) {
    * @return SASL password
    */
   private char[] buildClientPassword(Token<BlockTokenIdentifier> blockToken) {
-    return new String(Base64.encodeBase64(blockToken.getPassword(), false),
+    return new String(Base64.getEncoder().encode(blockToken.getPassword()),
         Charsets.UTF_8).toCharArray();
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/driver/impl/StateStoreSerializerPBImpl.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/driver/impl/StateStoreSerializerPBImpl.java
index 5bd650d456c3b..988db98c576b4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/driver/impl/StateStoreSerializerPBImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/driver/impl/StateStoreSerializerPBImpl.java
@@ -19,7 +19,7 @@
 
 import java.io.IOException;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.commons.codec.binary.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.federation.store.driver.StateStoreSerializer;
@@ -80,7 +80,7 @@ public byte[] serialize(BaseRecord record) {
       PBRecord recordPB = (PBRecord) record;
       Message msg = recordPB.getProto();
       byte[] byteArray = msg.toByteArray();
-      byteArray64 = Base64.encodeBase64(byteArray, false);
+      byteArray64 = Base64.getEncoder().encode(byteArray);
     }
     return byteArray64;
   }
@@ -99,7 +99,7 @@ public <T extends BaseRecord> T deserialize(
     T record = newRecord(clazz);
     if (record instanceof PBRecord) {
       PBRecord pbRecord = (PBRecord) record;
-      byte[] byteArray64 = Base64.encodeBase64(byteArray, false);
+      byte[] byteArray64 = Base64.getEncoder().encode(byteArray);
       String base64Encoded = StringUtils.newStringUtf8(byteArray64);
       pbRecord.readInstance(base64Encoded);
     }
@@ -109,7 +109,7 @@ public <T extends BaseRecord> T deserialize(
 
   @Override
   public <T extends BaseRecord> T deserialize(String data, Class<T> clazz)
       throws IOException {
-    byte[] byteArray64 = Base64.decodeBase64(data);
+    byte[] byteArray64 = Base64.getDecoder().decode(data);
     return deserialize(byteArray64, clazz);
   }
 }
\ No newline at end of file
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java
index 8422a8c4b6d15..a8601c341d008 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java
@@ -20,7 +20,7 @@
 import java.io.IOException;
 import java.lang.reflect.Method;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3;
 import org.apache.hadoop.thirdparty.protobuf.Message;
@@ -138,7 +138,7 @@ public T getProtoOrBuilder() {
    */
   @SuppressWarnings("unchecked")
   public void readInstance(String base64String) throws IOException {
-    byte[] bytes = Base64.decodeBase64(base64String);
+    byte[] bytes = Base64.getDecoder().decode(base64String);
     Message msg = getBuilder().mergeFrom(bytes).build();
     this.proto = (P) msg;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
index ae17761c204ab..eebf414b89dcc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
@@ -41,7 +41,7 @@
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CipherOption;
@@ -271,7 +271,7 @@ private byte[] getEncryptionKeyFromUserName(String userName)
     }
     int keyId = Integer.parseInt(nameComponents[0]);
     String blockPoolId = nameComponents[1];
-    byte[] nonce = Base64.decodeBase64(nameComponents[2]);
+    byte[] nonce = Base64.getDecoder().decode(nameComponents[2]);
     return blockPoolTokenSecretManager.retrieveDataEncryptionKey(keyId,
         blockPoolId, nonce);
   }
@@ -323,7 +323,7 @@ private char[] buildServerPassword(String userName) throws IOException {
     BlockTokenIdentifier identifier = deserializeIdentifier(userName);
     byte[] tokenPassword = blockPoolTokenSecretManager.retrievePassword(
         identifier);
-    return (new String(Base64.encodeBase64(tokenPassword, false),
+    return (new String(Base64.getEncoder().encode(tokenPassword),
         Charsets.UTF_8)).toCharArray();
   }
 
@@ -339,7 +339,7 @@ private BlockTokenIdentifier deserializeIdentifier(String str)
       throws IOException {
     BlockTokenIdentifier identifier = new BlockTokenIdentifier();
     identifier.readFields(new DataInputStream(new ByteArrayInputStream(
-        Base64.decodeBase64(str))));
+        Base64.getDecoder().decode(str))));
     return identifier;
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
index 73ce3a8111770..c67d6dd422314 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/CryptoUtils.java
@@ -21,7 +21,7 @@
 import java.io.InputStream;
 import java.nio.ByteBuffer;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -135,7 +135,7 @@ public static FSDataOutputStream wrapIfNecessary(Configuration conf,
       out.write(iv);
       if (LOG.isDebugEnabled()) {
         LOG.debug("IV written to Stream ["
-            + Base64.encodeBase64URLSafeString(iv) + "]");
+            + Base64.getUrlEncoder().encodeToString(iv) + "]");
       }
       return new CryptoFSDataOutputStream(out, CryptoCodec.getInstance(conf),
           getBufferSize(conf), getEncryptionKey(), iv, closeOutputStream);
@@ -180,7 +180,7 @@ public static InputStream wrapIfNecessary(Configuration conf, InputStream in,
           cryptoCodec.getCipherSuite().getAlgorithmBlockSize());
       if (LOG.isDebugEnabled()) {
         LOG.debug("IV read from ["
-            + Base64.encodeBase64URLSafeString(iv) + "]");
+            + Base64.getUrlEncoder().encodeToString(iv) + "]");
       }
       return new CryptoInputStream(in, cryptoCodec, bufferSize,
           getEncryptionKey(), iv, offset + cryptoPadding(conf));
@@ -215,7 +215,7 @@ public static FSDataInputStream wrapIfNecessary(Configuration conf,
           cryptoCodec.getCipherSuite().getAlgorithmBlockSize());
       if (LOG.isDebugEnabled()) {
         LOG.debug("IV read from Stream ["
-            + Base64.encodeBase64URLSafeString(iv) + "]");
+            + Base64.getUrlEncoder().encodeToString(iv) + "]");
       }
       return new CryptoFSDataInputStream(in, cryptoCodec, bufferSize,
           getEncryptionKey(), iv);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
index b083156082666..cd2df28f648f0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
@@ -26,7 +26,7 @@
 import javax.crypto.SecretKey;
 import javax.servlet.http.HttpServletRequest;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.WritableComparator;
@@ -55,7 +55,7 @@ public class SecureShuffleUtils {
    * @param msg
    */
   public static String generateHash(byte[] msg, SecretKey key) {
-    return new String(Base64.encodeBase64(generateByteHash(msg, key)),
+    return new String(Base64.getEncoder().encode(generateByteHash(msg, key)),
         Charsets.UTF_8);
   }
 
@@ -98,7 +98,7 @@ public static String hashFromString(String enc_str, SecretKey key)
    */
   public static void verifyReply(String base64Hash, String msg, SecretKey key)
       throws IOException {
-    byte[] hash = Base64.decodeBase64(base64Hash.getBytes(Charsets.UTF_8));
+    byte[] hash = Base64.getDecoder().decode(base64Hash.getBytes(Charsets.UTF_8));
 
     boolean res = verifyHash(hash, msg.getBytes(Charsets.UTF_8), key);
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java
index f9cfc0442e828..600acf895f5ed 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java
@@ -23,7 +23,7 @@
 import com.amazonaws.services.s3.model.ObjectMetadata;
 
 import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.net.util.Base64;
+import java.util.Base64;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
@@ -53,9 +53,9 @@ public static String convertKeyToMd5(FileSystem fs) {
     String base64Key = fs.getConf().getTrimmed(
         SERVER_SIDE_ENCRYPTION_KEY
     );
-    byte[] key = Base64.decodeBase64(base64Key);
+    byte[] key = Base64.getDecoder().decode(base64Key);
     byte[] md5 = DigestUtils.md5(key);
-    return Base64.encodeBase64String(md5).trim();
+    return Base64.getEncoder().encodeToString(md5).trim();
   }
 
   /**
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/BlockBlobAppendStream.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/BlockBlobAppendStream.java
index 5f051effefb9a..c9bda2bc98454 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/BlockBlobAppendStream.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/BlockBlobAppendStream.java
@@ -41,13 +41,13 @@
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
+import java.util.Base64;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import org.apache.commons.lang3.StringUtils;
 
 import org.apache.hadoop.fs.FSExceptionMessages;
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.fs.StreamCapabilities;
 import org.apache.hadoop.fs.Syncable;
 import org.apache.hadoop.fs.azure.StorageInterface.CloudBlockBlobWrapper;
@@ -691,7 +691,7 @@ private String generateOlderVersionBlockId(long id) {
     }
 
     return new String(
-        Base64.encodeBase64(blockIdInBytes),
+        Base64.getEncoder().encode(blockIdInBytes),
         StandardCharsets.UTF_8);
   }
 
@@ -704,7 +704,7 @@ private String generateNewerVersionBlockId(String prefix, long id) {
     String blockIdSuffix = String.format("%06d", id);
     byte[] blockIdInBytes =
         (prefix + blockIdSuffix).getBytes(StandardCharsets.UTF_8);
-    return new String(Base64.encodeBase64(blockIdInBytes), StandardCharsets.UTF_8);
+    return new String(Base64.getEncoder().encode(blockIdInBytes), StandardCharsets.UTF_8);
   }
 
   /**
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
index c310e29870a6d..1f13ec2c065de 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
@@ -47,6 +47,7 @@
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.Base64;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
@@ -97,7 +98,6 @@
 import org.apache.hadoop.fs.azurebfs.services.SharedKeyCredentials;
 import org.apache.hadoop.fs.azurebfs.services.AbfsPerfTracker;
 import org.apache.hadoop.fs.azurebfs.services.AbfsPerfInfo;
-import org.apache.hadoop.fs.azurebfs.utils.Base64;
 import org.apache.hadoop.fs.azurebfs.utils.CRC64;
 import org.apache.hadoop.fs.azurebfs.utils.DateTimeUtils;
 import org.apache.hadoop.fs.azurebfs.utils.UriUtils;
@@ -814,7 +814,7 @@ private String generateContinuationTokenForXns(final String firstEntryName) {
         .append(SINGLE_WHITE_SPACE)
         .append(firstEntryName);
 
-    return Base64.encode(token.toString().getBytes(StandardCharsets.UTF_8));
+    return Base64.getEncoder().encodeToString(token.toString().getBytes(StandardCharsets.UTF_8));
   }
 
   // generate continuation token for non-xns account
@@ -833,7 +833,7 @@ private String generateContinuationTokenForNonXns(String path, final String firstEntryName) {
     String date = simpleDateFormat.format(new Date());
     String token = String.format("%06d!%s!%06d!%s!%06d!%s!",
         path.length(), path, startFrom.length(), startFrom, date.length(), date);
-    String base64EncodedToken = Base64.encode(token.getBytes(StandardCharsets.UTF_8));
+    String base64EncodedToken = Base64.getEncoder().encodeToString(token.getBytes(StandardCharsets.UTF_8));
 
     StringBuilder encodedTokenBuilder = new StringBuilder(base64EncodedToken.length() + 5);
     encodedTokenBuilder.append(String.format("%s!%d!", TOKEN_VERSION, base64EncodedToken.length()));
@@ -1266,7 +1266,7 @@ private String convertXmsPropertiesToCommaSeparatedString(final Hashtable […] parseCommaSeparatedXmsProperties(String xMsPro
         throw new InvalidFileSystemPropertyException(xMsProperties);
       }
 
-      byte[] decodedValue = Base64.decode(nameValue[1]);
+      byte[] decodedValue = Base64.getDecoder().decode(nameValue[1]);
 
       final String value;
       try {
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java
index 5f54673d7ae38..8222b267d6c7c 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java
@@ -44,7 +44,7 @@
 
 import org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants;
 import org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations;
-import org.apache.hadoop.fs.azurebfs.utils.Base64;
+import java.util.Base64;
 
 /**
  * Represents the shared key credentials used to access an Azure Storage
@@ -76,7 +76,7 @@ public SharedKeyCredentials(final String accountName,
       throw new IllegalArgumentException("Invalid account key.");
     }
     this.accountName = accountName;
-    this.accountKey = Base64.decode(accountKey);
+    this.accountKey = Base64.getDecoder().decode(accountKey);
     initializeMac();
   }
 
@@ -108,7 +108,7 @@ private String computeHmac256(final String stringToSign) {
     synchronized (this) {
       hmac = hmacSha256.doFinal(utf8Bytes);
     }
-    return Base64.encode(hmac);
+    return Base64.getEncoder().encodeToString(hmac);
   }
 
   /**
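The ABFS `Base64` helper being dropped here wrapped standard, padded base64, so `getDecoder()`/`getEncoder()` line up one-to-one. A self-contained sketch of the signing path these hunks touch (the method shape approximates `computeHmac256`; class name and key material are illustrative, not the real API):

```java
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;

public class SharedKeySignatureSketch {
  static String computeHmac256(String accountKeyBase64, String stringToSign) throws Exception {
    // Decode the account key with the standard decoder, as the new constructor does ...
    byte[] key = Base64.getDecoder().decode(accountKeyBase64);
    Mac mac = Mac.getInstance("HmacSHA256");
    mac.init(new SecretKeySpec(key, "HmacSHA256"));
    // ... and re-encode the signature with the standard, padded encoder.
    return Base64.getEncoder().encodeToString(
        mac.doFinal(stringToSign.getBytes(StandardCharsets.UTF_8)));
  }

  public static void main(String[] args) throws Exception {
    String fakeKey = Base64.getEncoder()
        .encodeToString("not-a-real-key".getBytes(StandardCharsets.UTF_8));
    System.out.println(computeHmac256(fakeKey, "GET\n\n\n"));
  }
}
```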
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestBlobDataValidation.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestBlobDataValidation.java
index f54a2e17875b1..6c786d1e34991 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestBlobDataValidation.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestBlobDataValidation.java
@@ -28,6 +28,7 @@
 import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.util.Arrays;
+import java.util.Base64;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -47,7 +48,6 @@
 import com.microsoft.azure.storage.blob.BlockEntry;
 import com.microsoft.azure.storage.blob.BlockSearchMode;
 import com.microsoft.azure.storage.blob.CloudBlockBlob;
-import com.microsoft.azure.storage.core.Base64;
 
 /**
  * Test that we do proper data integrity validation with MD5 checks as
@@ -113,7 +113,7 @@ private void testStoreBlobMd5(boolean expectMd5Stored) throws Exception {
     }
 
     // Mess with the content so it doesn't match the MD5.
-    String newBlockId = Base64.encode(new byte[] { 55, 44, 33, 22 });
+    String newBlockId = Base64.getEncoder().encodeToString(new byte[] { 55, 44, 33, 22 });
     blob.uploadBlock(newBlockId, new ByteArrayInputStream(new byte[] { 6, 45 }), 2);
     blob.commitBlockList(Arrays.asList(new BlockEntry[] { new BlockEntry(
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
index 45deb9ebeec4d..f76b99220a949 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
@@ -31,7 +31,7 @@
 import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.LongConfigurationValidatorAnnotation;
 import org.apache.hadoop.fs.azurebfs.contracts.annotations.ConfigurationValidationAnnotations.Base64StringConfigurationValidatorAnnotation;
 import org.apache.hadoop.fs.azurebfs.contracts.exceptions.ConfigurationPropertyNotFoundException;
-import org.apache.hadoop.fs.azurebfs.utils.Base64;
+import java.util.Base64;
 
 import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_SSL_CHANNEL_MODE_KEY;
 import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.DEFAULT_READ_BUFFER_SIZE;
@@ -98,8 +98,8 @@ public class TestAbfsConfigurationFieldsValidation {
   public TestAbfsConfigurationFieldsValidation() throws Exception {
     super();
     this.accountName = "testaccount1.blob.core.windows.net";
-    this.encodedString = Base64.encode("base64Value".getBytes(Charsets.UTF_8));
-    this.encodedAccountKey = Base64.encode("someAccountKey".getBytes(Charsets.UTF_8));
+    this.encodedString = Base64.getEncoder().encodeToString("base64Value".getBytes(Charsets.UTF_8));
+    this.encodedAccountKey = Base64.getEncoder().encodeToString("someAccountKey".getBytes(Charsets.UTF_8));
     Configuration configuration = new Configuration();
     configuration.addResource(TestConfigurationKeys.TEST_CONFIGURATION_FILE_NAME);
     configuration.set(INT_KEY, "1234565");
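Note: the block-id hunk in ITestBlobDataValidation depends on the basic java.util.Base64 encoder emitting no line separators, which matches the removed single-line Azure helper. A small round-trip sketch with the same four test bytes:

    import java.util.Arrays;
    import java.util.Base64;

    public class BlockIdRoundTrip {
      public static void main(String[] args) {
        byte[] raw = new byte[] { 55, 44, 33, 22 };
        // The basic encoder never inserts line breaks (only the MIME encoder does).
        String newBlockId = Base64.getEncoder().encodeToString(raw);
        System.out.println(newBlockId);
        // Decoding restores the original bytes.
        System.out.println(Arrays.equals(raw, Base64.getDecoder().decode(newBlockId))); // true
      }
    }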
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/diagnostics/TestConfigurationValidators.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/diagnostics/TestConfigurationValidators.java
index f02eadc9a0491..403febec53ba0 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/diagnostics/TestConfigurationValidators.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/diagnostics/TestConfigurationValidators.java
@@ -22,7 +22,7 @@
 import org.junit.Test;
 
 import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidConfigurationValueException;
-import org.apache.hadoop.fs.azurebfs.utils.Base64;
+import java.util.Base64;
 
 import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.MIN_BUFFER_SIZE;
 import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.MAX_BUFFER_SIZE;
@@ -106,7 +106,7 @@ public void testStringConfigValidatorThrowsIfMissingValidValue() throws Exceptio
 
   @Test
   public void testBase64StringConfigValidator() throws Exception {
-    String encodedVal = Base64.encode("someValue".getBytes());
+    String encodedVal = Base64.getEncoder().encodeToString("someValue".getBytes());
     Base64StringConfigurationBasicValidator base64StringConfigurationValidator = new Base64StringConfigurationBasicValidator(FAKE_KEY, "", false);
 
     assertEquals("", base64StringConfigurationValidator.validate(null));
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java
index 121256c4dbcf7..fccdd36c6746b 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockDelegationSASTokenProvider.java
@@ -34,7 +34,7 @@
 import org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider;
 import org.apache.hadoop.fs.azurebfs.services.AbfsHttpHeader;
 import org.apache.hadoop.fs.azurebfs.services.AbfsHttpOperation;
-import org.apache.hadoop.fs.azurebfs.utils.Base64;
+import java.util.Base64;
 import org.apache.hadoop.fs.azurebfs.utils.DelegationSASGenerator;
 import org.apache.hadoop.fs.azurebfs.utils.SASGenerator;
 import org.apache.hadoop.security.AccessControlException;
@@ -114,7 +114,7 @@ private byte[] getUserDelegationKey(String accountName, String appID, String app
     int beginIndex = responseBody.indexOf("<Value>") + "<Value>".length();
     int endIndex = responseBody.indexOf("</Value>");
     String value = responseBody.substring(beginIndex, endIndex);
-    return Base64.decode(value);
+    return Base64.getDecoder().decode(value);
   }
 
   /**
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java
index 50ac20970f45f..ae5fb2e9e300a 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockSASTokenProvider.java
@@ -24,7 +24,7 @@
 import org.apache.hadoop.security.AccessControlException;
 
 import org.apache.hadoop.fs.azurebfs.AbfsConfiguration;
-import org.apache.hadoop.fs.azurebfs.utils.Base64;
+import java.util.Base64;
 import org.apache.hadoop.fs.azurebfs.utils.ServiceSASGenerator;
 
 /**
@@ -45,7 +45,7 @@ private String generateSAS(byte[] accountKey, String accountName, String fileSys
   public void initialize(Configuration configuration, String accountName) throws IOException {
     try {
       AbfsConfiguration abfsConfig = new AbfsConfiguration(configuration, accountName);
-      accountKey = Base64.decode(abfsConfig.getStorageAccountKey());
+      accountKey = Base64.getDecoder().decode(abfsConfig.getStorageAccountKey());
     } catch (Exception ex) {
       throw new IOException(ex);
     }
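Note: for the validator and account-key hunks above, one behavioral difference is worth keeping in mind when migrating: the commons-codec decoder silently ignores characters outside the Base64 alphabet, whereas java.util.Base64's decoder rejects them. A sketch of the stricter failure mode:

    import java.util.Base64;

    public class StrictDecodeSketch {
      public static void main(String[] args) {
        try {
          // '*' is not in the Base64 alphabet, so the JDK decoder throws.
          Base64.getDecoder().decode("not*base64");
        } catch (IllegalArgumentException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }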
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java
index 339fa4b5a45cc..f2aa6a8b99aa7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java
@@ -32,7 +32,6 @@
 import com.google.common.base.Preconditions;
-import org.apache.commons.codec.binary.Base64;
 import com.google.common.base.Strings;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -76,7 +75,6 @@ public class ApiServiceClient extends AppAdminClient {
   private static final Logger LOG =
       LoggerFactory.getLogger(ApiServiceClient.class);
-  private static final Base64 BASE_64_CODEC = new Base64(0);
   protected YarnClient yarnClient;
 
   public ApiServiceClient() {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/HttpUtil.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/HttpUtil.java
index ac5c079b60e7b..528f4fe42127a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/HttpUtil.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/HttpUtil.java
@@ -27,7 +27,7 @@
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.MediaType;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
@@ -49,7 +49,6 @@ public class HttpUtil {
   private static final Logger LOG =
       LoggerFactory.getLogger(HttpUtil.class);
-  private static final Base64 BASE_64_CODEC = new Base64(0);
 
   protected HttpUtil() {
     // prevents calls from subclass
@@ -93,7 +92,7 @@ public String run() throws Exception {
       gssContext.dispose();
       // Base64 encoded and stringified token for server
       LOG.debug("Got valid challenge for host {}", serverName);
-      return new String(BASE_64_CODEC.encode(outToken),
+      return new String(Base64.getEncoder().encode(outToken),
           StandardCharsets.US_ASCII);
     } catch (GSSException | IllegalAccessException
         | NoSuchFieldException | ClassNotFoundException e) {
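Note: in HttpUtil the retained two-step form, new String(Base64.getEncoder().encode(outToken), StandardCharsets.US_ASCII), produces the same string as Base64.getEncoder().encodeToString(outToken), because the Base64 alphabet is pure ASCII. A sketch with a stand-in byte array (not a real GSS token):

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class TokenEncodingSketch {
      public static void main(String[] args) {
        byte[] outToken = { 0x60, 0x23, 0x06, 0x09 }; // stand-in bytes only
        String viaBytes = new String(Base64.getEncoder().encode(outToken),
            StandardCharsets.US_ASCII);
        String direct = Base64.getEncoder().encodeToString(outToken);
        System.out.println(viaBytes.equals(direct)); // true
      }
    }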
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/util/YarnClientUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/util/YarnClientUtils.java
index 94b13a07d3f1b..8fc5c346e461d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/util/YarnClientUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/util/YarnClientUtils.java
@@ -26,7 +26,7 @@
 import com.google.common.collect.ImmutableSet;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -51,7 +51,6 @@ public abstract class YarnClientUtils {
   private static final Logger LOG = LoggerFactory.getLogger(YarnClientUtils.class);
-  private static final Base64 BASE_64_CODEC = new Base64(0);
   private static final String ADD_LABEL_FORMAT_ERR_MSG =
       "Input format for adding node-labels is not correct, it should be "
           + "labelName1[(exclusive=true/false)],LabelName2[] ..";
@@ -243,7 +242,7 @@ public String run() throws Exception {
       gssContext.dispose();
       // Base64 encoded and stringified token for server
       LOG.debug("Got valid challenge for host {}", serverName);
-      return new String(BASE_64_CODEC.encode(outToken),
+      return new String(Base64.getEncoder().encode(outToken),
           StandardCharsets.US_ASCII);
     } catch (GSSException | IllegalAccessException
         | NoSuchFieldException | ClassNotFoundException e) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AuxiliaryServiceHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AuxiliaryServiceHelper.java
index 1374d96f26113..73f993ca44903 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AuxiliaryServiceHelper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AuxiliaryServiceHelper.java
@@ -21,7 +21,7 @@
 import java.nio.ByteBuffer;
 import java.util.Map;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 
 public class AuxiliaryServiceHelper {
 
@@ -34,7 +34,7 @@ public static ByteBuffer getServiceDataFromEnv(String serviceName,
     if (null == meta) {
       return null;
     }
-    byte[] metaData = Base64.decodeBase64(meta);
+    byte[] metaData = Base64.getDecoder().decode(meta);
     return ByteBuffer.wrap(metaData);
   }
 
@@ -42,7 +42,7 @@ public static void setServiceDataIntoEnv(String serviceName,
       ByteBuffer metaData, Map<String, String> env) {
     byte[] byteData = metaData.array();
     env.put(getPrefixServiceName(serviceName),
-        Base64.encodeBase64String(byteData));
+        Base64.getEncoder().encodeToString(byteData));
   }
 
   public static String getPrefixServiceName(String serviceName) {
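Note: the encode and decode sides of AuxiliaryServiceHelper stay symmetric after the migration. A round-trip sketch; "AUX_SERVICE_X" is a hypothetical env key standing in for getPrefixServiceName(serviceName):

    import java.nio.ByteBuffer;
    import java.util.Base64;
    import java.util.HashMap;
    import java.util.Map;

    public class ServiceDataRoundTrip {
      public static void main(String[] args) {
        Map<String, String> env = new HashMap<>();
        ByteBuffer metaData = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4 });
        // setServiceDataIntoEnv equivalent: encode the backing array into the env map.
        env.put("AUX_SERVICE_X", Base64.getEncoder().encodeToString(metaData.array()));
        // getServiceDataFromEnv equivalent: decode and re-wrap.
        ByteBuffer back = ByteBuffer.wrap(Base64.getDecoder().decode(env.get("AUX_SERVICE_X")));
        System.out.println(metaData.equals(back)); // true
      }
    }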
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
index bda78032e16c7..e1e4dbe0d80bc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
@@ -56,7 +56,7 @@
 import com.google.common.base.Supplier;
 import com.google.common.collect.Lists;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -1161,7 +1161,7 @@ public Boolean get() {
     for (String serviceName : containerManager.getAuxServiceMetaData().keySet()) {
       Assert.assertEquals(
           containerManager.getAuxServiceMetaData().get(serviceName),
-          ByteBuffer.wrap(Base64.decodeBase64(reader.readLine().getBytes())));
+          ByteBuffer.wrap(Base64.getDecoder().decode(reader.readLine().getBytes())));
     }
 
     Assert.assertEquals(cId.toString(), containerLaunchContext
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppUtil.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppUtil.java
index 1fd19fdb294d0..78ef30b30619d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppUtil.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppUtil.java
@@ -27,7 +27,7 @@
 import javax.servlet.http.HttpServletRequest;
 
-import org.apache.commons.codec.binary.Base64;
+import java.util.Base64;
 import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -278,8 +278,7 @@ private static ContainerLaunchContext createContainerLaunchContext(
     for (Map.Entry<String, String> entry : newApp
         .getContainerLaunchContextInfo().getAuxillaryServiceData().entrySet()) {
       if (!entry.getValue().isEmpty()) {
-        Base64 decoder = new Base64(0, null, true);
-        byte[] data = decoder.decode(entry.getValue());
+        byte[] data = Base64.getUrlDecoder().decode(entry.getValue());
         hmap.put(entry.getKey(), ByteBuffer.wrap(data));
       }
     }
@@ -327,8 +326,7 @@ private static Credentials createCredentials(CredentialsInfo credentials) {
       for (Map.Entry<String, String> entry : credentials.getSecrets()
           .entrySet()) {
         Text alias = new Text(entry.getKey());
-        Base64 decoder = new Base64(0, null, true);
-        byte[] secret = decoder.decode(entry.getValue());
+        byte[] secret = Base64.getUrlDecoder().decode(entry.getValue());
         ret.addSecretKey(alias, secret);
       }
     } catch (IOException ie) {
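Note: new Base64(0, null, true) was commons-codec's unchunked URL-safe codec, so Base64.getUrlDecoder() is the matching replacement in RMWebAppUtil; the JDK decoder understands '-' and '_' and, per its Javadoc, treats padding in the final unit as optional. A sketch of both accepted forms:

    import java.util.Arrays;
    import java.util.Base64;

    public class UrlSafeDecodeSketch {
      public static void main(String[] args) {
        // '-' and '_' replace '+' and '/' in the URL-safe alphabet.
        byte[] unpadded = Base64.getUrlDecoder().decode("_-A");
        byte[] padded = Base64.getUrlDecoder().decode("_-A=");
        System.out.println(Arrays.equals(unpadded, padded)); // true
      }
    }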
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
index f5f21aac24945..631f031214117 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
@@ -40,6 +40,7 @@
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
+import java.util.Base64;
 
 import javax.servlet.FilterConfig;
 import javax.servlet.ServletException;
@@ -49,7 +50,6 @@
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.JettyUtils;
 import org.apache.hadoop.io.Text;
@@ -810,7 +810,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia)
     CredentialsInfo credentials = new CredentialsInfo();
     HashMap<String, String> tokens = new HashMap<>();
     HashMap<String, String> secrets = new HashMap<>();
-    secrets.put("secret1", Base64.encodeBase64String(
+    secrets.put("secret1", Base64.getEncoder().encodeToString(
         "mysecret".getBytes("UTF8")));
     credentials.setSecrets(secrets);
     credentials.setTokens(tokens);
@@ -834,7 +834,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia)
     appInfo.getContainerLaunchContextInfo().setEnvironment(environment);
     appInfo.getContainerLaunchContextInfo().setAcls(acls);
     appInfo.getContainerLaunchContextInfo().getAuxillaryServiceData()
-        .put("test", Base64.encodeBase64URLSafeString("value12".getBytes("UTF8")));
+        .put("test", Base64.getUrlEncoder().encodeToString("value12".getBytes("UTF8")));
     appInfo.getContainerLaunchContextInfo().setCredentials(credentials);
     appInfo.getResource().setMemory(1024);
     appInfo.getResource().setvCores(1);
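Note: one subtlety in the last hunk: commons-codec's encodeBase64URLSafeString omitted trailing '=' padding, while Base64.getUrlEncoder().encodeToString emits it; Base64.getUrlEncoder().withoutPadding() would reproduce the old output byte for byte. Both forms decode identically through getUrlDecoder (used in RMWebAppUtil above), so the round trip in the test is unaffected. A sketch of the two outputs:

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class UrlSafePaddingSketch {
      public static void main(String[] args) {
        byte[] v = "value12".getBytes(StandardCharsets.UTF_8);
        System.out.println(Base64.getUrlEncoder().encodeToString(v));                  // dmFsdWUxMg==
        System.out.println(Base64.getUrlEncoder().withoutPadding().encodeToString(v)); // dmFsdWUxMg
      }
    }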