From 55ebfb9f9f32788d2e03a8130cbe9c6bbc336d91 Mon Sep 17 00:00:00 2001
From: Andrew Purtell <apurtell@apache.org>
Date: Sun, 24 Jul 2022 21:12:32 -0700
Subject: [PATCH 1/3] HBASE-27234 Clean up error-prone warnings in
 hbase-examples

---
 .../org/apache/hadoop/hbase/util/Bytes.java   | 19 ++++++++++--
 .../client/example/HttpProxyExample.java      | 21 +++++++++----
 .../example/MultiThreadedClientExample.java   |  3 +-
 .../example/BulkDeleteEndpoint.java           |  6 ++--
 .../ExampleRegionObserverWithMetrics.java     |  2 ++
 .../coprocessor/example/RowCountEndpoint.java | 21 +++++--------
 .../hadoop/hbase/mapreduce/IndexBuilder.java  |  1 +
 .../hbase/mapreduce/SampleUploader.java       | 19 +++++++-----
 ...ShadeSaslServerAuthenticationProvider.java |  5 ++--
 .../hadoop/hbase/thrift/DemoClient.java       | 13 ++++----
 .../hadoop/hbase/thrift/HttpDoAsClient.java   | 30 +++++--------------
 .../hadoop/hbase/thrift2/DemoClient.java      |  8 ++---
 .../apache/hadoop/hbase/util/ClientUtils.java | 20 ++++++++++---
 .../WriteHeavyIncrementObserverTestBase.java  |  2 ++
 .../TestShadeSaslAuthenticationProvider.java  |  5 ++--
 15 files changed, 98 insertions(+), 77 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index 6fd2b01a078b..ac36f83f89dd 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -485,8 +485,7 @@ private static byte[] readBytes(ByteBuffer buf) {
   }
 
   /**
-   * @param b Presumed UTF-8 encoded byte array.
-   * @return String made from <code>b</code>
+   * Returns String made from <code>b</code>
    */
   public static String toString(final byte[] b) {
     if (b == null) {
@@ -495,6 +494,21 @@ public static String toString(final byte[] b) {
     return toString(b, 0, b.length);
   }
 
+  /**
+   * Returns String made from <code>b</code>
+   */
+  public static String toString(ByteBuffer buf) {
+    if (buf == null) {
+      return null;
+    }
+    if (!buf.hasArray()) {
+      buf = ByteBuffer.wrap(buf.array(), buf.arrayOffset(), buf.remaining());
+      return toString(buf.array());
+    } else {
+      return toString(buf.array(), buf.arrayOffset(), buf.remaining());
+    }
+  }
+
   /**
    * Joins two byte arrays together using a separator.
    * @param b1 The first byte array.
@@ -2432,4 +2446,5 @@ public static int findCommonPrefix(byte[] left, byte[] right, int leftLength, in
     }
     return result;
   }
+
 }
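Note on the Bytes.toString(ByteBuffer) overload added above: decoding through a buffer's backing array is only correct when arrayOffset() and remaining() are honored, because a heap buffer obtained by slicing shares its backing array with the parent. A minimal standalone sketch of that pitfall (class name and sample data are illustrative, not part of the patch):

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    public class ByteBufferDecodeExample {
      public static void main(String[] args) {
        byte[] backing = "prefix:payload".getBytes(StandardCharsets.UTF_8);
        // A slice sharing the backing array, starting at offset 7.
        ByteBuffer slice = ByteBuffer.wrap(backing, 7, 7).slice();
        // Decoding the whole array would yield "prefix:payload"; honoring
        // arrayOffset() and remaining() yields only the readable window.
        String decoded = new String(slice.array(), slice.arrayOffset(),
          slice.remaining(), StandardCharsets.UTF_8);
        System.out.println(decoded); // prints "payload"
      }
    }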
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
index 9ecd266b5dde..9f9e4ae9b730 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java
@@ -22,6 +22,8 @@
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.util.Iterator;
+import java.util.List;
 import java.util.concurrent.ExecutionException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -35,6 +37,7 @@
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
 import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
 import org.apache.hbase.thirdparty.io.netty.bootstrap.ServerBootstrap;
 import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
@@ -158,12 +161,20 @@ private void write(ChannelHandlerContext ctx, HttpResponseStatus status, String
   }
 
   private Params parse(FullHttpRequest req) {
-    String[] components = new QueryStringDecoder(req.uri()).path().split("/");
-    Preconditions.checkArgument(components.length == 4, "Unrecognized uri: %s", req.uri());
+    List<String> components =
+      Splitter.on('/').splitToList(new QueryStringDecoder(req.uri()).path());
+    Preconditions.checkArgument(components.size() == 4, "Unrecognized uri: %s", req.uri());
+    Iterator<String> i = components.iterator();
     // path is start with '/' so split will give an empty component
-    String[] cfAndCq = components[3].split(":");
-    Preconditions.checkArgument(cfAndCq.length == 2, "Unrecognized uri: %s", req.uri());
-    return new Params(components[1], components[2], cfAndCq[0], cfAndCq[1]);
+    i.next();
+    String table = i.next();
+    String row = i.next();
+    List<String> cfAndCq = Splitter.on(':').splitToList(i.next());
+    Preconditions.checkArgument(cfAndCq.size() == 2, "Unrecognized uri: %s", req.uri());
+    i = cfAndCq.iterator();
+    String family = i.next();
+    String qualifier = i.next();
+    return new Params(table, row, family, qualifier);
   }
 
   private void get(ChannelHandlerContext ctx, FullHttpRequest req) {
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
index 963a182790fb..ca3470d7b6b1 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
@@ -30,7 +30,6 @@
 import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.Cell.Type;
 import org.apache.hadoop.hbase.CellBuilderFactory;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.TableName;
@@ -240,7 +239,7 @@ public Boolean call() throws Exception {
         byte[] rk = Bytes.toBytes(ThreadLocalRandom.current().nextLong());
         Put p = new Put(rk);
         p.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(rk).setFamily(FAMILY)
-          .setQualifier(QUAL).setTimestamp(p.getTimestamp()).setType(Type.Put).setValue(value)
+          .setQualifier(QUAL).setTimestamp(p.getTimestamp()).setType(Cell.Type.Put).setValue(value)
           .build());
         t.put(p);
       }
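The HttpProxyExample change above swaps String.split() for Guava's Splitter, which error-prone recommends because split() silently discards trailing empty strings. A standalone comparison (uses plain com.google.common for the demo; the HBase code uses the shaded org.apache.hbase.thirdparty package):

    import java.util.List;
    import com.google.common.base.Splitter;

    public class SplitterVsSplit {
      public static void main(String[] args) {
        // Both keep the leading empty component of "/table/row/cf:cq" ...
        System.out.println("/table/row/cf:cq".split("/").length); // 4
        List<String> parts = Splitter.on('/').splitToList("/table/row/cf:cq");
        System.out.println(parts.size()); // 4
        // ... but only Splitter keeps trailing empty components.
        System.out.println("/a/b/".split("/").length);                    // 3
        System.out.println(Splitter.on('/').splitToList("/a/b/").size()); // 4
      }
    }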
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index 243b23e70af3..e7424990edea 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -28,7 +28,6 @@
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Scan;
@@ -52,7 +51,6 @@
 import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest;
 import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType;
 import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse;
-import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder;
 import org.apache.hadoop.hbase.shaded.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 
@@ -157,7 +155,7 @@ public void delete(RpcController controller, BulkDeleteRequest request,
         }
         OperationStatus[] opStatus = region.batchMutate(deleteArr);
         for (i = 0; i < opStatus.length; i++) {
-          if (opStatus[i].getOperationStatusCode() != OperationStatusCode.SUCCESS) {
+          if (opStatus[i].getOperationStatusCode() != HConstants.OperationStatusCode.SUCCESS) {
             break;
           }
           totalRowsDeleted++;
@@ -183,7 +181,7 @@ public void delete(RpcController controller, BulkDeleteRequest request,
         }
       }
     }
-    Builder responseBuilder = BulkDeleteResponse.newBuilder();
+    BulkDeleteResponse.Builder responseBuilder = BulkDeleteResponse.newBuilder();
     responseBuilder.setRowsDeleted(totalRowsDeleted);
     if (deleteType == DeleteType.VERSION) {
       responseBuilder.setVersionsDeleted(totalVersionsDeleted);
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java
index 9d16a580b6eb..7074164e43ee 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.java
@@ -109,6 +109,8 @@ private void performCostlyOperation() {
       // simulate the operation by sleeping.
       Thread.sleep(ThreadLocalRandom.current().nextLong(100));
     } catch (InterruptedException ignore) {
+      // Restore the interrupt status
+      Thread.currentThread().interrupt();
     }
   }
 }
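The ExampleRegionObserverWithMetrics fix above follows the standard rule for swallowed InterruptedException: sleeping clears the thread's interrupt flag, so a catch block that does nothing hides the cancellation request from callers. A small sketch of the re-assert pattern (class name is illustrative):

    public class InterruptExample {
      public static void main(String[] args) throws Exception {
        Thread worker = new Thread(() -> {
          try {
            Thread.sleep(60_000);
          } catch (InterruptedException e) {
            // Re-assert the flag so code further up the stack can see it.
            Thread.currentThread().interrupt();
          }
          // Without the re-assert this would print false.
          System.out.println(Thread.currentThread().isInterrupted());
        });
        worker.start();
        worker.interrupt();
        worker.join();
      }
    }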
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
index e640ad854e4f..df0c5d72aafc 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
@@ -21,6 +21,8 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.mutable.MutableLong;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -97,10 +99,7 @@ public void getRowCount(RpcController controller, CountRequest request,
       CoprocessorRpcUtils.setControllerException(controller, ioe);
     } finally {
       if (scanner != null) {
-        try {
-          scanner.close();
-        } catch (IOException ignored) {
-        }
+        IOUtils.closeQuietly(scanner);
       }
     }
     done.run(response);
@@ -118,24 +117,18 @@ public void getKeyValueCount(RpcController controller, CountRequest request,
       scanner = env.getRegion().getScanner(new Scan());
       List<Cell> results = new ArrayList<>();
       boolean hasMore = false;
-      long count = 0;
+      MutableLong count = new MutableLong();
       do {
         hasMore = scanner.next(results);
-        for (Cell kv : results) {
-          count++;
-        }
+        results.forEach((r) -> count.increment());
         results.clear();
       } while (hasMore);
-
-      response = CountResponse.newBuilder().setCount(count).build();
+      response = CountResponse.newBuilder().setCount(count.longValue()).build();
     } catch (IOException ioe) {
       CoprocessorRpcUtils.setControllerException(controller, ioe);
     } finally {
       if (scanner != null) {
-        try {
-          scanner.close();
-        } catch (IOException ignored) {
-        }
+        IOUtils.closeQuietly(scanner);
       }
     }
     done.run(response);
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
index 51f5b53be993..65588419541a 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
@@ -131,6 +131,7 @@ public static Job configureJob(Configuration conf, String[] args) throws IOExcep
     return job;
   }
 
+  @Override
   public int run(String[] args) throws Exception {
     Configuration conf = HBaseConfiguration.create(getConf());
     if (args.length < 3) {
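The RowCountEndpoint change above needs MutableLong because locals captured by a Java lambda must be effectively final, so a primitive long cannot be incremented inside forEach. A minimal sketch of that constraint (class name and data are illustrative):

    import java.util.List;
    import org.apache.commons.lang3.mutable.MutableLong;

    public class LambdaCounterExample {
      public static void main(String[] args) {
        List<String> cells = List.of("a", "b", "c");
        // `long count = 0; cells.forEach(c -> count++);` would not compile:
        // captured locals must be effectively final.
        MutableLong count = new MutableLong();
        cells.forEach(c -> count.increment());
        System.out.println(count.longValue()); // 3
      }
    }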
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
index d1397674f5df..13e363b076ea 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -37,6 +39,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
+
 /**
  * Sample Uploader MapReduce
  * <p>
@@ -80,16 +84,16 @@ public void map(LongWritable key, Text line, Context context) throws IOException
       // Each line is comma-delimited; row,family,qualifier,value
 
       // Split CSV line
-      String[] values = line.toString().split(",");
-      if (values.length != 4) {
+      List<String> values = Splitter.on(',').splitToList(line.toString());
+      if (values.size() != 4) {
         return;
       }
-
+      Iterator<String> i = values.iterator();
       // Extract each value
-      byte[] row = Bytes.toBytes(values[0]);
-      byte[] family = Bytes.toBytes(values[1]);
-      byte[] qualifier = Bytes.toBytes(values[2]);
-      byte[] value = Bytes.toBytes(values[3]);
+      byte[] row = Bytes.toBytes(i.next());
+      byte[] family = Bytes.toBytes(i.next());
+      byte[] qualifier = Bytes.toBytes(i.next());
+      byte[] value = Bytes.toBytes(i.next());
 
       // Create Put
       Put put = new Put(row);
@@ -136,6 +140,7 @@ public static Job configureJob(Configuration conf, String[] args) throws IOExcep
    * @param otherArgs The command line parameters after ToolRunner handles standard.
    * @throws Exception When running the job fails.
    */
+  @Override
   public int run(String[] otherArgs) throws Exception {
     if (otherArgs.length != 2) {
       System.err.println("Wrong number of arguments: " + otherArgs.length);
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/security/provider/example/ShadeSaslServerAuthenticationProvider.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/security/provider/example/ShadeSaslServerAuthenticationProvider.java
index b5d8ffec7d5b..9f0e70133db5 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/security/provider/example/ShadeSaslServerAuthenticationProvider.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/security/provider/example/ShadeSaslServerAuthenticationProvider.java
@@ -20,6 +20,7 @@
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
@@ -86,8 +87,8 @@ Map<String, char[]> readPasswordDB(Configuration conf) throws IOException {
     }
 
     Map<String, char[]> passwordDb = new HashMap<>();
-    try (FSDataInputStream fdis = fs.open(passwordFile);
-      BufferedReader reader = new BufferedReader(new InputStreamReader(fdis))) {
+    try (FSDataInputStream fdis = fs.open(passwordFile); BufferedReader reader =
+      new BufferedReader(new InputStreamReader(fdis, StandardCharsets.UTF_8))) {
       String line = null;
       int offset = 0;
       while ((line = reader.readLine()) != null) {
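The ShadeSaslServerAuthenticationProvider change above pins the reader to UTF-8; an InputStreamReader built without a charset uses the platform default, so the same file can decode differently across hosts. A standalone sketch of the explicit-charset pattern (the file path is hypothetical):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class ExplicitCharsetExample {
      public static void main(String[] args) throws IOException {
        Path passwordFile = Path.of("/tmp/passwd.db"); // hypothetical input
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
          Files.newInputStream(passwordFile), StandardCharsets.UTF_8))) {
          String line;
          while ((line = reader.readLine()) != null) {
            System.out.println(line); // decoded as UTF-8 on every platform
          }
        }
      }
    }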
" + ClientUtils.utf8(name)); if (name.equals(demoTable) || name.equals(disabledTable)) { if (client.isTableEnabled(name)) { - System.out.println(" disabling table: " + ClientUtils.utf8(name.array())); + System.out.println(" disabling table: " + ClientUtils.utf8(name)); client.disableTable(name); } - System.out.println(" deleting table: " + ClientUtils.utf8(name.array())); + System.out.println(" deleting table: " + ClientUtils.utf8(name)); client.deleteTable(name); } } @@ -324,7 +321,7 @@ private void run() throws Exception { columnNames.clear(); for (ColumnDescriptor col2 : client.getColumnDescriptors(demoTable).values()) { - System.out.println("column with name: " + new String(col2.name.array())); + System.out.println("column with name: " + ClientUtils.utf8(col2.name)); System.out.println(col2.toString()); columnNames.add(col2.name); @@ -356,7 +353,7 @@ private void printVersions(ByteBuffer row, List versions) { rowStr.append("; "); } - System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr); + System.out.println("row: " + ClientUtils.utf8(row) + ", values: " + rowStr); } private void printRow(TRowResult rowResult) { diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java index 5cb4a3f4a49a..25f5511a1dd1 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java @@ -25,7 +25,6 @@ import java.util.Base64; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; import java.util.Set; import javax.security.auth.Subject; @@ -36,8 +35,6 @@ import org.apache.hadoop.hbase.thrift.generated.AlreadyExists; import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; import org.apache.hadoop.hbase.thrift.generated.Hbase; -import org.apache.hadoop.hbase.thrift.generated.TCell; -import org.apache.hadoop.hbase.thrift.generated.TRowResult; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClientUtils; import org.apache.thrift.protocol.TBinaryProtocol; @@ -129,13 +126,13 @@ private void run() throws Exception { // System.out.println("scanning tables..."); for (ByteBuffer name : refresh(client, httpClient).getTableNames()) { - System.out.println(" found: " + ClientUtils.utf8(name.array())); - if (ClientUtils.utf8(name.array()).equals(ClientUtils.utf8(t))) { + System.out.println(" found: " + ClientUtils.utf8(name)); + if (ClientUtils.utf8(name).equals(ClientUtils.utf8(t))) { if (refresh(client, httpClient).isTableEnabled(name)) { - System.out.println(" disabling table: " + ClientUtils.utf8(name.array())); + System.out.println(" disabling table: " + ClientUtils.utf8(name)); refresh(client, httpClient).disableTable(name); } - System.out.println(" deleting table: " + ClientUtils.utf8(name.array())); + System.out.println(" deleting table: " + ClientUtils.utf8(name)); refresh(client, httpClient).deleteTable(name); } } @@ -167,8 +164,8 @@ private void run() throws Exception { Map columnMap = refresh(client, httpClient).getColumnDescriptors(ByteBuffer.wrap(t)); for (ColumnDescriptor col2 : columnMap.values()) { - System.out.println( - " column: " + ClientUtils.utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions); + System.out + .println(" column: " + ClientUtils.utf8(col2.name) + ", maxVer: " + col2.maxVersions); } transport.close(); @@ -205,26 +202,13 @@ private String 
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
index 5cb4a3f4a49a..25f5511a1dd1 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java
@@ -25,7 +25,6 @@
 import java.util.Base64;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import javax.security.auth.Subject;
@@ -36,8 +35,6 @@
 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
 import org.apache.hadoop.hbase.thrift.generated.Hbase;
-import org.apache.hadoop.hbase.thrift.generated.TCell;
-import org.apache.hadoop.hbase.thrift.generated.TRowResult;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClientUtils;
 import org.apache.thrift.protocol.TBinaryProtocol;
@@ -129,13 +126,13 @@ private void run() throws Exception {
     //
     System.out.println("scanning tables...");
     for (ByteBuffer name : refresh(client, httpClient).getTableNames()) {
-      System.out.println(" found: " + ClientUtils.utf8(name.array()));
-      if (ClientUtils.utf8(name.array()).equals(ClientUtils.utf8(t))) {
+      System.out.println(" found: " + ClientUtils.utf8(name));
+      if (ClientUtils.utf8(name).equals(ClientUtils.utf8(t))) {
         if (refresh(client, httpClient).isTableEnabled(name)) {
-          System.out.println(" disabling table: " + ClientUtils.utf8(name.array()));
+          System.out.println(" disabling table: " + ClientUtils.utf8(name));
           refresh(client, httpClient).disableTable(name);
         }
-        System.out.println(" deleting table: " + ClientUtils.utf8(name.array()));
+        System.out.println(" deleting table: " + ClientUtils.utf8(name));
         refresh(client, httpClient).deleteTable(name);
       }
     }
@@ -167,8 +164,8 @@ private void run() throws Exception {
     Map<ByteBuffer, ColumnDescriptor> columnMap =
       refresh(client, httpClient).getColumnDescriptors(ByteBuffer.wrap(t));
     for (ColumnDescriptor col2 : columnMap.values()) {
-      System.out.println(
-        " column: " + ClientUtils.utf8(col2.name.array()) + ", maxVer: " + col2.maxVersions);
+      System.out
+        .println(" column: " + ClientUtils.utf8(col2.name) + ", maxVer: " + col2.maxVersions);
     }
 
     transport.close();
@@ -205,26 +202,13 @@ private String generateTicket() throws GSSException {
     context.requestInteg(true);
 
     final byte[] outToken = context.initSecContext(new byte[0], 0, 0);
-    StringBuffer outputBuffer = new StringBuffer();
+    StringBuilder outputBuffer = new StringBuilder();
     outputBuffer.append("Negotiate ");
     outputBuffer.append(Bytes.toString(Base64.getEncoder().encode(outToken)));
     System.out.print("Ticket is: " + outputBuffer);
     return outputBuffer.toString();
   }
 
-  private void printVersions(ByteBuffer row, List<TCell> versions) {
-    StringBuilder rowStr = new StringBuilder();
-    for (TCell cell : versions) {
-      rowStr.append(ClientUtils.utf8(cell.value.array()));
-      rowStr.append("; ");
-    }
-    System.out.println("row: " + ClientUtils.utf8(row.array()) + ", values: " + rowStr);
-  }
-
-  private void printRow(TRowResult rowResult) {
-    ClientUtils.printRow(rowResult);
-  }
-
   static Subject getSubject() throws Exception {
     if (!secure) {
       return new Subject();
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
index 293ef7e26c08..fb6df302acce 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift2/DemoClient.java
@@ -140,11 +140,11 @@ public void run() throws Exception {
 
     TResult result = client.get(table, get);
 
-    System.out.print("row = " + new String(result.getRow()));
+    System.out.print("row = " + ClientUtils.utf8(result.getRow()));
     for (TColumnValue resultColumnValue : result.getColumnValues()) {
-      System.out.print("family = " + new String(resultColumnValue.getFamily()));
-      System.out.print("qualifier = " + new String(resultColumnValue.getFamily()));
-      System.out.print("value = " + new String(resultColumnValue.getValue()));
+      System.out.print("family = " + ClientUtils.utf8(resultColumnValue.getFamily()));
+      System.out.print("qualifier = " + ClientUtils.utf8(resultColumnValue.getFamily()));
+      System.out.print("value = " + ClientUtils.utf8(resultColumnValue.getValue()));
       System.out.print("timestamp = " + resultColumnValue.getTimestamp());
     }
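The HttpDoAsClient change above replaces StringBuffer with StringBuilder: StringBuffer synchronizes every method call, which buys nothing for a buffer that never leaves one method, so StringBuilder is the drop-in, unsynchronized equivalent. A trivial sketch (names are illustrative):

    public class BuilderExample {
      static String negotiateHeader(String base64Token) {
        // Method-local, single-threaded: no synchronization needed.
        StringBuilder sb = new StringBuilder();
        sb.append("Negotiate ");
        sb.append(base64Token);
        return sb.toString();
      }

      public static void main(String[] args) {
        System.out.println(negotiateHeader("abc123")); // Negotiate abc123
      }
    }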
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java
index b0f13cab2c7e..dc232bd47f75 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java
@@ -20,7 +20,6 @@
 import java.nio.ByteBuffer;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.SortedMap;
 import java.util.TreeMap;
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -87,7 +86,7 @@ public static void printRow(final TRowResult rowResult) {
     }
 
     StringBuilder rowStr = new StringBuilder();
-    for (SortedMap.Entry<String, TCell> entry : sorted.entrySet()) {
+    for (Map.Entry<String, TCell> entry : sorted.entrySet()) {
       rowStr.append(entry.getKey());
       rowStr.append(" => ");
       rowStr.append(utf8(entry.getValue().value.array()));
@@ -98,8 +97,8 @@ public static void printRow(final TRowResult rowResult) {
   }
 
   /**
-   * Helper to translate byte[]'s to UTF8 strings
-   * @param buf byte array buffer
+   * Helper to translate byte[]s to UTF8 strings
+   * @param buf byte array
    * @return UTF8 decoded string value
    */
   public static String utf8(final byte[] buf) {
@@ -110,4 +109,17 @@ public static String utf8(final byte[] buf) {
     }
   }
 
+  /**
+   * Helper to translate byte[]s to UTF8 strings
+   * @param buf byte buffer
+   * @return UTF8 decoded string value
+   */
+  public static String utf8(final ByteBuffer buf) {
+    try {
+      return Bytes.toString(buf);
+    } catch (IllegalArgumentException e) {
+      return "[INVALID UTF-8]";
+    }
+  }
+
 }
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserverTestBase.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserverTestBase.java
index abd199e22eee..5fe06f31b176 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserverTestBase.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserverTestBase.java
@@ -20,6 +20,7 @@
 import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
+import java.io.InterruptedIOException;
 import java.io.UncheckedIOException;
 import java.util.stream.IntStream;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
@@ -78,6 +79,7 @@ private static void increment(int sleepSteps) throws IOException {
       try {
         Thread.sleep(10);
       } catch (InterruptedException e) {
+        throw (IOException) new InterruptedIOException().initCause(e);
       }
     }
   }
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/security/provider/example/TestShadeSaslAuthenticationProvider.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/security/provider/example/TestShadeSaslAuthenticationProvider.java
index dc1a133a12f2..a479310691b1 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/security/provider/example/TestShadeSaslAuthenticationProvider.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/security/provider/example/TestShadeSaslAuthenticationProvider.java
@@ -29,6 +29,7 @@
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -128,8 +129,8 @@ static void createUserDBFile(FileSystem fs, Path p, Map<String, char[]> userData
     if (fs.exists(p)) {
       fs.delete(p, true);
     }
-    try (FSDataOutputStream out = fs.create(p);
-      BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out))) {
+    try (FSDataOutputStream out = fs.create(p); BufferedWriter writer =
+      new BufferedWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
       for (Entry<String, char[]> e : userDatabase.entrySet()) {
         writer.write(e.getKey());
         writer.write(ShadeSaslServerAuthenticationProvider.SEPARATOR);
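The test-base fix above converts an InterruptedException into an InterruptedIOException, which both satisfies the method's IOException contract and preserves the interrupt as the cause. A standalone sketch of the pattern (method name is illustrative):

    import java.io.IOException;
    import java.io.InterruptedIOException;

    public class InterruptToIOExample {
      static void sleepStep() throws IOException {
        try {
          Thread.sleep(10);
        } catch (InterruptedException e) {
          // InterruptedIOException is an IOException; initCause keeps the trace.
          throw (IOException) new InterruptedIOException().initCause(e);
        }
      }

      public static void main(String[] args) throws IOException {
        Thread.currentThread().interrupt(); // make sleep() throw immediately
        try {
          sleepStep();
        } catch (InterruptedIOException e) {
          System.out.println("caught, cause = " + e.getCause());
        }
      }
    }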
From 62245fb7d8b70a6d7ac39f8ebe3ec0e7856f0512 Mon Sep 17 00:00:00 2001
From: Andrew Purtell <apurtell@apache.org>
Date: Mon, 25 Jul 2022 17:27:49 -0700
Subject: [PATCH 2/3] Address review feedback

---
 .../hbase/coprocessor/example/RowCountEndpoint.java | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
index df0c5d72aafc..40e16b79d27c 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
@@ -22,7 +22,6 @@
 import java.util.Collections;
 import java.util.List;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.mutable.MutableLong;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -36,6 +35,7 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
 import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.Service;
@@ -117,13 +117,13 @@ public void getKeyValueCount(RpcController controller, CountRequest request,
       scanner = env.getRegion().getScanner(new Scan());
       List<Cell> results = new ArrayList<>();
       boolean hasMore = false;
-      MutableLong count = new MutableLong();
+      long count = 0;
       do {
         hasMore = scanner.next(results);
-        results.forEach((r) -> count.increment());
+        count += Iterables.size(results);
         results.clear();
       } while (hasMore);
-      response = CountResponse.newBuilder().setCount(count.longValue()).build();
+      response = CountResponse.newBuilder().setCount(count).build();
     } catch (IOException ioe) {
       CoprocessorRpcUtils.setControllerException(controller, ioe);
     } finally {

From a1f36acd83606023ecf2302b259ef759a7f13f9e Mon Sep 17 00:00:00 2001
From: Andrew Purtell <apurtell@apache.org>
Date: Wed, 27 Jul 2022 16:17:21 -0700
Subject: [PATCH 3/3] More review feedback

---
 .../org/apache/hadoop/hbase/util/Bytes.java   | 19 ++-----------------
 .../apache/hadoop/hbase/util/ClientUtils.java |  8 ++------
 2 files changed, 4 insertions(+), 23 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index ac36f83f89dd..6fd2b01a078b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -485,7 +485,8 @@ private static byte[] readBytes(ByteBuffer buf) {
   }
 
   /**
-   * Returns String made from <code>b</code>
+   * @param b Presumed UTF-8 encoded byte array.
+   * @return String made from <code>b</code>
    */
   public static String toString(final byte[] b) {
     if (b == null) {
@@ -494,21 +495,6 @@ public static String toString(final byte[] b) {
     return toString(b, 0, b.length);
   }
 
-  /**
-   * Returns String made from <code>b</code>
-   */
-  public static String toString(ByteBuffer buf) {
-    if (buf == null) {
-      return null;
-    }
-    if (!buf.hasArray()) {
-      buf = ByteBuffer.wrap(buf.array(), buf.arrayOffset(), buf.remaining());
-      return toString(buf.array());
-    } else {
-      return toString(buf.array(), buf.arrayOffset(), buf.remaining());
-    }
-  }
-
   /**
    * Joins two byte arrays together using a separator.
    * @param b1 The first byte array.
@@ -2446,5 +2432,4 @@ public static int findCommonPrefix(byte[] left, byte[] right, int leftLength, in
   }
   return result;
 }
-
 }
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java
index dc232bd47f75..c5d72d6882f5 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/util/ClientUtils.java
@@ -110,16 +110,12 @@ public static String utf8(final byte[] buf) {
   }
 
   /**
-   * Helper to translate byte[]s to UTF8 strings
+   * Helper to translate a byte buffer to UTF8 strings
    * @param buf byte buffer
    * @return UTF8 decoded string value
    */
   public static String utf8(final ByteBuffer buf) {
-    try {
-      return Bytes.toString(buf);
-    } catch (IllegalArgumentException e) {
-      return "[INVALID UTF-8]";
-    }
+    return buf.toString();
   }
 
 }
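The PATCH 2/3 change above counts a batch with Iterables.size() instead of a per-cell lambda; for anything that implements Collection, Iterables.size() delegates to size() rather than iterating. A standalone sketch (plain com.google.common here; the HBase code uses the shaded org.apache.hbase.thirdparty package):

    import java.util.List;
    import com.google.common.collect.Iterables;

    public class BatchCountExample {
      public static void main(String[] args) {
        List<String> batch = List.of("cell-1", "cell-2", "cell-3");
        long count = 0;
        // Delegates to List.size(), so this is O(1) per batch.
        count += Iterables.size(batch);
        System.out.println(count); // 3
      }
    }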