HBASE-28714 Hadoop check for hadoop 3.4.0 is failing (#6064)
Signed-off-by: Nick Dimiduk <ndimiduk@apache.org>
Apache9 authored Jul 10, 2024
1 parent 4b7bf83 commit d2d341e
Showing 5 changed files with 29 additions and 12 deletions.
@@ -30,7 +30,6 @@
 import static org.apache.hbase.thirdparty.io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
 import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;
 
-import com.google.protobuf.CodedOutputStream;
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.lang.reflect.InvocationTargetException;
@@ -92,6 +91,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream;
 import org.apache.hbase.thirdparty.io.netty.bootstrap.Bootstrap;
 import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
 import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufAllocator;
@@ -409,7 +409,7 @@ private static void requestWriteBlock(Channel channel, StorageType storageType,
       writeBlockProtoBuilder.setStorageType(PBHelperClient.convertStorageType(storageType)).build();
     int protoLen = proto.getSerializedSize();
     ByteBuf buffer =
-      channel.alloc().buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);
+      channel.alloc().buffer(3 + CodedOutputStream.computeUInt32SizeNoTag(protoLen) + protoLen);
     buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);
     buffer.writeByte(Op.WRITE_BLOCK.code);
     proto.writeDelimitedTo(new ByteBufOutputStream(buffer));
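The new CodedOutputStream import comes from hbase-thirdparty's shaded protobuf 3, where the protobuf 2 helper computeRawVarint32Size was renamed computeUInt32SizeNoTag; both return the byte length of an unsigned varint. The buffer math in requestWriteBlock mirrors the wire layout: 2 bytes for DATA_TRANSFER_VERSION, 1 byte for the opcode, then a varint length prefix plus the message written by writeDelimitedTo. A minimal standalone sketch of that arithmetic, not part of the commit:

import org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream;

public final class DelimitedFrameSize {
  public static void main(String[] args) {
    int protoLen = 300; // stand-in for proto.getSerializedSize()
    // writeShort(version) + writeByte(opcode) = 3 bytes, then the
    // delimited message: varint(length) followed by the payload itself.
    int needed = 3 + CodedOutputStream.computeUInt32SizeNoTag(protoLen) + protoLen;
    System.out.println(needed); // a 300-byte message takes a 2-byte varint: 305
  }
}
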
@@ -21,7 +21,6 @@
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY;
 import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE;
 
-import com.google.protobuf.CodedOutputStream;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Field;
@@ -81,6 +80,7 @@
 import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
 import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
 import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
+import org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream;
 import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
 import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream;
 import org.apache.hbase.thirdparty.io.netty.buffer.CompositeByteBuf;
@@ -391,7 +391,7 @@ static void wrapAndSetPayload(DataTransferEncryptorMessageProto.Builder builder,
     Class<?> builderClass = DataTransferEncryptorMessageProto.Builder.class;
 
     // Try the unrelocated ByteString
-    Class<?> byteStringClass = com.google.protobuf.ByteString.class;
+    Class<?> byteStringClass;
     try {
       // See if it can load the relocated ByteString, which comes from hadoop-thirdparty.
       byteStringClass = Class.forName("org.apache.hadoop.thirdparty.protobuf.ByteString");
@@ -400,6 +400,12 @@ static void wrapAndSetPayload(DataTransferEncryptorMessageProto.Builder builder,
     } catch (ClassNotFoundException e) {
       LOG.debug("Did not find relocated ByteString class from hadoop-thirdparty."
         + " Assuming this is below Hadoop 3.3.0", e);
+      try {
+        byteStringClass = Class.forName("com.google.protobuf.ByteString");
+        LOG.debug("com.google.protobuf.ByteString found.");
+      } catch (ClassNotFoundException ex) {
+        throw new RuntimeException(ex);
+      }
     }
 
     // LiteralByteString is a package private class in protobuf. Make it accessible.
@@ -446,7 +452,7 @@ private void sendSaslMessage(ChannelHandlerContext ctx, byte[] payload,
     }
     DataTransferEncryptorMessageProto proto = builder.build();
     int size = proto.getSerializedSize();
-    size += CodedOutputStream.computeRawVarint32Size(size);
+    size += CodedOutputStream.computeUInt32SizeNoTag(size);
     ByteBuf buf = ctx.alloc().buffer(size);
     proto.writeDelimitedTo(new ByteBufOutputStream(buf));
     safeWrite(ctx, buf);
@@ -109,18 +109,20 @@ protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out)
 
   static {
     boolean hasParser = false;
 
-    // These are the protobuf classes coming from Hadoop. Not the one from hbase-shaded-protobuf
-    protobufMessageLiteClass = com.google.protobuf.MessageLite.class;
-    protobufMessageLiteBuilderClass = com.google.protobuf.MessageLite.Builder.class;
-
     try {
       protobufMessageLiteClass = Class.forName("org.apache.hadoop.thirdparty.protobuf.MessageLite");
       protobufMessageLiteBuilderClass =
         Class.forName("org.apache.hadoop.thirdparty.protobuf.MessageLite$Builder");
       LOG.debug("Hadoop 3.3 and above shades protobuf.");
     } catch (ClassNotFoundException e) {
       LOG.debug("Hadoop 3.2 and below use unshaded protobuf.", e);
+      try {
+        protobufMessageLiteClass = Class.forName("com.google.protobuf.MessageLite");
+        protobufMessageLiteBuilderClass = Class.forName("com.google.protobuf.MessageLite$Builder");
+      } catch (ClassNotFoundException ex) {
+        throw new RuntimeException("can not initialize protobuf related classes for hadoop", ex);
+      }
     }
 
     try {
@@ -130,7 +132,7 @@ protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out)
       hasParser = true;
     } catch (NoSuchMethodException e) {
       // If the method is not found, we are in trouble. Abort.
-      throw new RuntimeException(e);
+      throw new RuntimeException("can not initialize protobuf related classes for hadoop", e);
     }
 
     HAS_PARSER = hasParser;
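This decoder and the SASL helper above now share one resolution strategy: compile only against hbase-thirdparty's shaded protobuf, and locate Hadoop's own protobuf classes reflectively, relocated name first. A condensed sketch of the pattern with a hypothetical helper name (the package strings are the real ones from the diff):

public final class HadoopProtobufClasses {
  // Hypothetical helper distilled from the two call sites above.
  static Class<?> load(String name) {
    try {
      // Hadoop 3.3 and above relocate protobuf into hadoop-thirdparty.
      return Class.forName("org.apache.hadoop.thirdparty.protobuf." + name);
    } catch (ClassNotFoundException e) {
      try {
        // Hadoop 3.2 and below ship unrelocated com.google.protobuf.
        return Class.forName("com.google.protobuf." + name);
      } catch (ClassNotFoundException ex) {
        throw new RuntimeException("can not initialize protobuf related classes for hadoop", ex);
      }
    }
  }

  public static void main(String[] args) {
    // Nested types use the binary name, e.g. "MessageLite$Builder".
    System.out.println(load("MessageLite").getName());
  }
}
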
@@ -34,7 +34,6 @@
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
-import org.apache.hadoop.io.IOUtils;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -47,6 +46,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.io.Closeables;
+
 @Category({ MediumTests.class, ClientTests.class })
 public class TestFlushFromClient {
 
@@ -76,7 +77,7 @@ public static void setUpBeforeClass() throws Exception {
 
   @AfterClass
   public static void tearDownAfterClass() throws Exception {
-    IOUtils.cleanup(null, asyncConn);
+    Closeables.close(asyncConn, true);
     TEST_UTIL.shutdownMiniCluster();
   }
 
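The test cleanup swaps Hadoop's IOUtils.cleanup for the Closeables shipped in hbase-thirdparty (relocated Guava). Closeables.close(closeable, true) tolerates null and logs rather than rethrows an IOException from close(), so a failing connection close cannot mask the mini-cluster shutdown that follows. A small standalone sketch of that behavior, not from the commit:

import java.io.Closeable;
import java.io.IOException;

import org.apache.hbase.thirdparty.com.google.common.io.Closeables;

public final class SwallowOnClose {
  public static void main(String[] args) throws IOException {
    Closeable failing = () -> {
      throw new IOException("close failed");
    };
    // swallowIOException = true: the failure is logged, not thrown,
    // so later cleanup such as shutdownMiniCluster() still runs.
    Closeables.close(failing, true);
    System.out.println("cleanup continued");
  }
}
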
pom.xml: 8 additions & 0 deletions
@@ -4284,6 +4284,10 @@
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-reload4j</artifactId>
       </exclusion>
+      <exclusion>
+        <groupId>org.bouncycastle</groupId>
+        <artifactId>bcprov-jdk15on</artifactId>
+      </exclusion>
     </exclusions>
   </dependency>
   <dependency>
@@ -4374,6 +4378,10 @@
         <groupId>javax.ws.rs</groupId>
         <artifactId>jsr311-api</artifactId>
       </exclusion>
+      <exclusion>
+        <groupId>org.bouncycastle</groupId>
+        <artifactId>bcprov-jdk15on</artifactId>
+      </exclusion>
     </exclusions>
   </dependency>
   <dependency>
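The two new exclusions drop the legacy bcprov-jdk15on artifact from Hadoop test dependencies; Hadoop 3.4.0 moved to the jdk18on line of Bouncy Castle jars, and keeping both variants off the same classpath avoids duplicate provider classes (this motivation is inferred from the artifact names, not stated in the commit). A hypothetical probe for spotting such duplicates, not part of the commit:

import java.io.IOException;
import java.net.URL;
import java.util.Enumeration;

public final class FindBouncyCastleDuplicates {
  public static void main(String[] args) throws IOException {
    // List every jar that provides the Bouncy Castle provider class;
    // more than one line printed means conflicting artifacts remain.
    Enumeration<URL> sources = ClassLoader.getSystemClassLoader()
      .getResources("org/bouncycastle/jce/provider/BouncyCastleProvider.class");
    while (sources.hasMoreElements()) {
      System.out.println(sources.nextElement());
    }
  }
}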
