From 578337b9fa26f735ad92696358f8fade02eab7d2 Mon Sep 17 00:00:00 2001 From: Duo Zhang Date: Sun, 30 Jun 2024 23:41:27 +0800 Subject: [PATCH] HBASE-28684 Remove CellWrapper and use ExtendedCell internally in client side data structure --- .../mapreduce/MapReduceHFileSplitterJob.java | 3 +- .../client/AsyncBatchRpcRetryingCaller.java | 10 +- .../apache/hadoop/hbase/client/Delete.java | 25 +- .../apache/hadoop/hbase/client/Increment.java | 10 +- .../apache/hadoop/hbase/client/Mutation.java | 222 +++------------ .../client/PackagePrivateFieldAccessor.java | 11 + .../org/apache/hadoop/hbase/client/Put.java | 21 +- .../apache/hadoop/hbase/client/Result.java | 47 +++- .../org/apache/hadoop/hbase/ipc/Call.java | 13 +- .../hadoop/hbase/ipc/CellBlockBuilder.java | 18 +- .../ipc/DelegatingHBaseRpcController.java | 8 +- .../hadoop/hbase/ipc/HBaseRpcController.java | 12 +- .../hbase/ipc/HBaseRpcControllerImpl.java | 25 +- .../hadoop/hbase/ipc/RpcConnection.java | 4 +- .../hbase/ipc/RpcControllerFactory.java | 12 +- .../hbase/shaded/protobuf/ProtobufUtil.java | 5 +- .../shaded/protobuf/RequestConverter.java | 38 +-- .../hadoop/hbase/client/TestMutation.java | 12 +- .../hbase/ipc/TestCellBlockBuilder.java | 28 +- .../hbase/ipc/TestHBaseRpcControllerImpl.java | 30 ++- .../shaded/protobuf/TestProtobufUtil.java | 11 +- .../hadoop/hbase/ByteBufferKeyValue.java | 4 +- .../hadoop/hbase/CellComparatorImpl.java | 2 +- .../org/apache/hadoop/hbase/CellUtil.java | 25 +- .../hadoop/hbase/ExtendedCellScannable.java | 37 +++ .../hadoop/hbase/ExtendedCellScanner.java | 37 +++ .../org/apache/hadoop/hbase/KeyValue.java | 4 +- .../apache/hadoop/hbase/KeyValueTestUtil.java | 7 +- .../org/apache/hadoop/hbase/KeyValueUtil.java | 11 +- .../apache/hadoop/hbase/PrivateCellUtil.java | 254 ++++++++++++++---- .../java/org/apache/hadoop/hbase/TagUtil.java | 6 +- .../hadoop/hbase/codec/BaseDecoder.java | 10 +- .../hadoop/hbase/codec/BaseEncoder.java | 4 +- .../apache/hadoop/hbase/codec/CellCodec.java | 6 +- .../hadoop/hbase/codec/CellCodecWithTags.java | 6 +- .../org/apache/hadoop/hbase/codec/Codec.java | 4 +- .../hadoop/hbase/codec/KeyValueCodec.java | 14 +- .../hbase/codec/KeyValueCodecWithTags.java | 10 +- .../hadoop/hbase/io/CellOutputStream.java | 4 +- ...ner.java => SizedExtendedCellScanner.java} | 6 +- .../hbase/io/encoding/EncodedDataBlock.java | 8 +- .../hadoop/hbase/io/encoding/NoneEncoder.java | 4 +- .../hadoop/hbase/TestCellComparator.java | 2 +- .../org/apache/hadoop/hbase/TestCellUtil.java | 165 ++---------- .../hadoop/hbase/codec/TestCellCodec.java | 12 +- .../hbase/codec/TestCellCodecWithTags.java | 16 +- .../codec/TestKeyValueCodecWithTags.java | 4 +- .../client/TestRpcControllerFactory.java | 9 +- .../hbase/mapreduce/CellSerialization.java | 8 +- .../hbase/mapreduce/CellSortReducer.java | 18 +- .../mapreduce/ExtendedCellSerialization.java | 2 +- .../hbase/mapreduce/HFileOutputFormat2.java | 4 +- .../apache/hadoop/hbase/mapreduce/Import.java | 30 ++- .../hadoop/hbase/mapreduce/PutCombiner.java | 52 ++-- .../hbase/mapreduce/PutSortReducer.java | 11 +- .../hadoop/hbase/mapreduce/WALPlayer.java | 3 +- .../hbase/util/MapReduceExtendedCell.java | 21 +- .../mapreduce/TestHFileOutputFormat2.java | 3 +- .../hbase/mapreduce/TestImportExport.java | 22 +- .../AsyncRegionReplicationRetryingCaller.java | 4 +- .../hbase/client/AsyncRegionServerAdmin.java | 11 +- .../hadoop/hbase/codec/MessageCodec.java | 6 +- .../hbase/io/hfile/HFilePrettyPrinter.java | 3 +- .../apache/hadoop/hbase/ipc/CallRunner.java | 6 +- 
.../hadoop/hbase/ipc/NettyServerCall.java | 4 +- .../hbase/ipc/NettyServerRpcConnection.java | 4 +- .../org/apache/hadoop/hbase/ipc/RpcCall.java | 7 +- .../apache/hadoop/hbase/ipc/RpcServer.java | 3 +- .../hadoop/hbase/ipc/RpcServerInterface.java | 5 +- .../apache/hadoop/hbase/ipc/ServerCall.java | 12 +- .../hadoop/hbase/ipc/ServerRpcConnection.java | 6 +- .../hadoop/hbase/ipc/SimpleServerCall.java | 4 +- .../hbase/ipc/SimpleServerRpcConnection.java | 4 +- .../org/apache/hadoop/hbase/mob/MobUtils.java | 8 +- .../protobuf/ReplicationProtobufUtil.java | 35 +-- .../hadoop/hbase/regionserver/HRegion.java | 22 +- .../regionserver/MobReferenceOnlyFilter.java | 5 +- .../hbase/regionserver/RSRpcServices.java | 29 +- .../regionserver/ReversedMobStoreScanner.java | 2 +- .../wal/AsyncProtobufLogWriter.java | 3 +- .../regionserver/wal/ProtobufLogWriter.java | 3 +- .../regionserver/wal/SecureWALCellCodec.java | 5 +- .../hbase/regionserver/wal/WALCellCodec.java | 8 +- .../ReplaySyncReplicationWALCallable.java | 7 +- .../security/access/AccessController.java | 12 +- .../hbase/security/access/AuthManager.java | 5 +- .../security/access/PermissionStorage.java | 3 +- ...estCustomPriorityRpcControllerFactory.java | 4 +- .../hbase/client/TestFromClientSide.java | 5 +- .../client/TestIncrementsFromClientSide.java | 5 +- .../client/TestMalformedCellFromClient.java | 7 +- .../hadoop/hbase/codec/CodecPerformance.java | 23 +- .../hbase/codec/TestCellMessageCodec.java | 12 +- .../TestPostIncrementAndAppendBeforeWAL.java | 13 +- .../hadoop/hbase/filter/TestFilterList.java | 7 +- .../hbase/io/compress/HFileTestBase.java | 4 +- .../TestBufferedDataBlockEncoder.java | 8 +- .../io/encoding/TestDataBlockEncoders.java | 10 +- .../hbase/io/hfile/TestHFileEncryption.java | 4 +- .../io/hfile/TestScannerFromBucketCache.java | 26 +- .../hfile/TestSeekBeforeWithInlineBlocks.java | 6 +- .../hadoop/hbase/io/hfile/TestSeekTo.java | 3 +- .../hadoop/hbase/ipc/AbstractTestIPC.java | 14 +- .../ipc/TestNettyChannelWritability.java | 11 +- .../hbase/ipc/TestProtobufRpcServiceImpl.java | 12 +- .../hadoop/hbase/master/MockRegionServer.java | 9 +- .../hbase/mob/TestMobStoreCompaction.java | 13 +- .../namequeues/TestNamedQueueRecorder.java | 6 +- .../hbase/namequeues/TestRpcLogDetails.java | 6 +- .../region/TestRegionProcedureStore.java | 6 +- .../protobuf/TestReplicationProtobuf.java | 10 +- .../regionserver/DataBlockEncodingTool.java | 10 +- .../EncodedSeekPerformanceTest.java | 5 +- .../hbase/regionserver/TestBlocksScanned.java | 6 +- .../hbase/regionserver/TestCellFlatSet.java | 12 +- .../hbase/regionserver/TestHMobStore.java | 12 +- .../hbase/regionserver/TestHRegion.java | 33 +-- .../TestHRegionServerBulkLoad.java | 5 +- .../hadoop/hbase/regionserver/TestHStore.java | 34 +-- .../hbase/regionserver/TestHStoreFile.java | 5 +- .../hbase/regionserver/TestKeepDeletes.java | 11 +- .../regionserver/TestMultiColumnScanner.java | 9 +- .../regionserver/TestReplicateToReplica.java | 4 +- ...estStoreFileScannerWithTagCompression.java | 4 +- .../hadoop/hbase/regionserver/TestTags.java | 20 +- .../compactions/TestCompactor.java | 4 +- .../replication/TestReplicationWithTags.java | 5 +- ...ExpAsStringVisibilityLabelServiceImpl.java | 5 +- .../TestVisibilityLabelsReplication.java | 12 +- .../hadoop/hbase/util/HFileTestUtil.java | 3 +- .../hadoop/hbase/thrift2/ThriftUtilities.java | 16 +- 131 files changed, 1064 insertions(+), 1011 deletions(-) create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellScannable.java create mode 
100644 hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellScanner.java rename hbase-common/src/main/java/org/apache/hadoop/hbase/io/{SizedCellScanner.java => SizedExtendedCellScanner.java} (85%) diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java index 04180972885e..2c073c56f7eb 100644 --- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java +++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -76,7 +77,7 @@ static class HFileCellMapper extends Mapper actions, int tries) { } private ClientProtos.MultiRequest buildReq(Map actionsByRegion, - List cells, Map indexMap) throws IOException { + List cells, Map indexMap) throws IOException { ClientProtos.MultiRequest.Builder multiRequestBuilder = ClientProtos.MultiRequest.newBuilder(); ClientProtos.RegionAction.Builder regionActionBuilder = ClientProtos.RegionAction.newBuilder(); ClientProtos.Action.Builder actionBuilder = ClientProtos.Action.newBuilder(); @@ -382,7 +382,7 @@ private void sendToServer(ServerName serverName, ServerRequest serverReq, int tr return; } ClientProtos.MultiRequest req; - List cells = new ArrayList<>(); + List cells = new ArrayList<>(); // Map from a created RegionAction to the original index for a RowMutations within // the original list of actions. This will be used to process the results when there // is RowMutations/CheckAndMutate in the action list. @@ -398,7 +398,7 @@ private void sendToServer(ServerName serverName, ServerRequest serverReq, int tr calcPriority(serverReq.getPriority(), tableName), tableName); controller.setRequestAttributes(requestAttributes); if (!cells.isEmpty()) { - controller.setCellScanner(createCellScanner(cells)); + controller.setCellScanner(PrivateCellUtil.createExtendedCellScanner(cells)); } stub.multi(controller, req, resp -> { if (controller.failed()) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java index f97db8a116d6..65ac34e4a0ed 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilder; import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.security.access.Permission; @@ -171,10 +172,8 @@ public Delete addFamily(final byte[] family) { * @return this for invocation chaining */ public Delete addFamily(final byte[] family, final long timestamp) { - if (timestamp < 0) { - throw new IllegalArgumentException("Timestamp cannot be negative. 
ts=" + timestamp); - } - List<Cell> list = getCellList(family); + checkTimestamp(timestamp); + List<ExtendedCell> list = getCellList(family); if (!list.isEmpty()) { list.clear(); } KeyValue kv = new KeyValue(row, family, null, timestamp, KeyValue.Type.DeleteFamily); list.add(kv); return this; } /** * Delete all columns of the specified family with a timestamp equal to the specified timestamp. * @param family family name * @param timestamp version timestamp * @return this for invocation chaining */ public Delete addFamilyVersion(final byte[] family, final long timestamp) { - if (timestamp < 0) { - throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp); - } - List<Cell> list = getCellList(family); + checkTimestamp(timestamp); + List<ExtendedCell> list = getCellList(family); list.add(new KeyValue(row, family, null, timestamp, KeyValue.Type.DeleteFamilyVersion)); return this; } @@ -218,10 +215,8 @@ public Delete addColumns(final byte[] family, final byte[] qualifier) { * @return this for invocation chaining */ public Delete addColumns(final byte[] family, final byte[] qualifier, final long timestamp) { - if (timestamp < 0) { - throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp); - } - List<Cell> list = getCellList(family); + checkTimestamp(timestamp); + List<ExtendedCell> list = getCellList(family); list.add(new KeyValue(this.row, family, qualifier, timestamp, KeyValue.Type.DeleteColumn)); return this; } @@ -247,10 +242,8 @@ public Delete addColumn(final byte[] family, final byte[] qualifier) { * @return this for invocation chaining */ public Delete addColumn(byte[] family, byte[] qualifier, long timestamp) { - if (timestamp < 0) { - throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp); - } - List<Cell> list = getCellList(family); + checkTimestamp(timestamp); + List<ExtendedCell> list = getCellList(family); KeyValue kv = new KeyValue(this.row, family, qualifier, timestamp, KeyValue.Type.Delete); list.add(kv); return this; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java index aad853f8c06c..fc03180f5200 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java @@ -21,12 +21,14 @@ import java.util.List; import java.util.Map; import java.util.NavigableMap; +import java.util.Objects; import java.util.TreeMap; import java.util.UUID; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilder; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.security.access.Permission; @@ -114,10 +116,8 @@ public Increment add(Cell cell) throws IOException { * @return the Increment object */ public Increment addColumn(byte[] family, byte[] qualifier, long amount) { - if (family == null) { - throw new IllegalArgumentException("family cannot be null"); - } - List<Cell> list = getCellList(family); + Objects.requireNonNull(family, "family cannot be null"); + List<ExtendedCell> list = getCellList(family); KeyValue kv = createPutKeyValue(family, qualifier, ts, Bytes.toBytes(amount)); list.add(kv); return this; @@ -224,7 +224,7 @@ public String toString() { } sb.append(", families="); boolean moreThanOne = false; - for (Map.Entry<byte[], List<Cell>> entry : this.familyMap.entrySet()) { + for (Map.Entry<byte[], List<ExtendedCell>> entry : this.familyMap.entrySet()) { if (moreThanOne) { sb.append("), "); } else { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java index 0be0325d499f..349857fadb10 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java @@ -22,11 +22,9 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NavigableMap; -import java.util.Optional; import java.util.TreeMap; import java.util.UUID; import java.util.stream.Collectors; @@ -34,10 +32,10 @@ import org.apache.hadoop.hbase.CellBuilder; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; -import org.apache.hadoop.hbase.CellScannable; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScannable; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.IndividualBytesFieldCell; import org.apache.hadoop.hbase.KeyValue; @@ -66,7 +64,7 @@ @InterfaceAudience.Public public abstract class Mutation extends OperationWithAttributes - implements Row, CellScannable, HeapSize { + implements Row, ExtendedCellScannable, HeapSize { public static final long MUTATION_OVERHEAD = ClassSize.align( // This ClassSize.OBJECT + @@ -96,13 +94,12 @@ public abstract class Mutation extends OperationWithAttributes private static final String RETURN_RESULTS = "_rr_"; // TODO: row should be final - protected byte[] row = null; + protected byte[] row; protected long ts = HConstants.LATEST_TIMESTAMP; protected Durability durability = Durability.USE_DEFAULT; - // TODO: familyMap should be final // A Map sorted by column family. - protected NavigableMap> familyMap; + protected final NavigableMap> familyMap; /** * empty construction. We need this empty construction to keep binary compatibility. @@ -115,7 +112,7 @@ protected Mutation(Mutation clone) { super(clone); this.row = clone.getRow(); this.ts = clone.getTimestamp(); - this.familyMap = clone.getFamilyCellMap().entrySet().stream() + this.familyMap = clone.familyMap.entrySet().stream() .collect(Collectors.toMap(e -> e.getKey(), e -> new ArrayList<>(e.getValue()), (k, v) -> { throw new RuntimeException("collisions!!!"); }, () -> new TreeMap<>(Bytes.BYTES_COMPARATOR))); @@ -127,18 +124,23 @@ protected Mutation(Mutation clone) { * @param ts timestamp * @param familyMap the map to collect all cells internally. CAN'T be null */ + @SuppressWarnings({ "unchecked", "rawtypes" }) protected Mutation(byte[] row, long ts, NavigableMap> familyMap) { this.row = Preconditions.checkNotNull(row); - if (row.length == 0) { - throw new IllegalArgumentException("Row can't be empty"); - } + Preconditions.checkArgument(row.length > 0, "Row can't be empty"); this.ts = ts; - this.familyMap = Preconditions.checkNotNull(familyMap); + // We do not allow other Cell types in HBase so here we just do a simple cast + this.familyMap = (NavigableMap) Preconditions.checkNotNull(familyMap); } + /** + * For client users: You should only use the return value as a + * {@link org.apache.hadoop.hbase.CellScanner}, {@link ExtendedCellScanner} is marked as + * IA.Private which means there is no guarantee about its API stability. 
+ */ @Override - public CellScanner cellScanner() { - return CellUtil.createCellScanner(getFamilyCellMap()); + public ExtendedCellScanner cellScanner() { + return PrivateCellUtil.createExtendedCellScanner(familyMap); } /** @@ -147,13 +149,8 @@ public CellScanner cellScanner() { * @param family column family * @return a list of Cell objects, returns an empty list if one doesn't exist. */ - List<Cell> getCellList(byte[] family) { - List<Cell> list = getFamilyCellMap().get(family); - if (list == null) { - list = new ArrayList<>(); - getFamilyCellMap().put(family, list); - } - return list; + List<ExtendedCell> getCellList(byte[] family) { + return familyMap.computeIfAbsent(family, k -> new ArrayList<>()); } /** @@ -218,7 +215,7 @@ public Map<String, Object> toMap(int maxCols) { map.put("row", Bytes.toStringBinary(this.row)); int colCount = 0; // iterate through all column families affected - for (Map.Entry<byte[], List<Cell>> entry : getFamilyCellMap().entrySet()) { + for (Map.Entry<byte[], List<ExtendedCell>> entry : familyMap.entrySet()) { // map from this family to details for each cell affected within the family List<Map<String, Object>> qualifierDetails = new ArrayList<>(); columns.put(Bytes.toStringBinary(entry.getKey()), qualifierDetails); @@ -227,7 +224,7 @@ continue; } // add details for each cell - for (Cell cell : entry.getValue()) { + for (ExtendedCell cell : entry.getValue()) { if (--maxCols <= 0) { continue; } @@ -250,7 +247,7 @@ return map; } - private static Map<String, Object> cellToStringMap(Cell c) { + private static Map<String, Object> cellToStringMap(ExtendedCell c) { Map<String, Object> stringMap = new HashMap<>(); stringMap.put("qualifier", Bytes.toStringBinary(c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength())); @@ -283,8 +280,9 @@ public Durability getDurability() { /** * Method for retrieving the put's familyMap */ + @SuppressWarnings({ "unchecked", "rawtypes" }) public NavigableMap<byte[], List<Cell>> getFamilyCellMap() { - return this.familyMap; + return (NavigableMap) this.familyMap; } /** @@ -517,9 +515,7 @@ protected long extraHeapSize() { * Set the timestamp of the delete. */ public Mutation setTimestamp(long timestamp) { - if (timestamp < 0) { - throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp); - } + checkTimestamp(timestamp); this.ts = timestamp; return this; } @@ -603,7 +599,7 @@ public List<Cell> get(byte[] family, byte[] qualifier) { */ protected boolean has(byte[] family, byte[] qualifier, long ts, byte[] value, boolean ignoreTS, boolean ignoreValue) { - List<Cell> list = getCellList(family); + List<ExtendedCell> list = getCellList(family); if (list.isEmpty()) { return false; } @@ -613,7 +609,7 @@ // F T => 2 // F F => 1 if (!ignoreTS && !ignoreValue) { - for (Cell cell : list) { + for (ExtendedCell cell : list) { if ( CellUtil.matchingFamily(cell, family) && CellUtil.matchingQualifier(cell, qualifier) && CellUtil.matchingValue(cell, value) && cell.getTimestamp() == ts @@ -692,6 +688,10 @@ static void checkRow(ByteBuffer row) { } } + protected final void checkTimestamp(long ts) { + Preconditions.checkArgument(ts >= 0, "Timestamp cannot be negative. ts=%s", ts); + } + Mutation add(Cell cell) throws IOException { // Checking that the row of the kv is the same as the mutation // TODO: It is fraught with risk if user pass the wrong row.
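As a usage note for the accessors changed above: a minimal client-side sketch (hypothetical row, family, and qualifier values; the class name is illustrative) of the intended calling pattern, typing the scanner as the public CellScanner rather than the IA.Private ExtendedCellScanner.

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class MutationScanSketch {
  public static void main(String[] args) throws IOException {
    Put put = new Put(Bytes.toBytes("row-1"));
    put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    // The runtime type is now ExtendedCellScanner, but client code should
    // only rely on the public CellScanner contract.
    CellScanner scanner = put.cellScanner();
    while (scanner.advance()) {
      Cell cell = scanner.current();
      System.out.println(Bytes.toString(CellUtil.cloneValue(cell)));
    }
  }
}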
@@ -714,9 +714,9 @@ Mutation add(Cell cell) throws IOException { } if (cell instanceof ExtendedCell) { - getCellList(family).add(cell); + getCellList(family).add((ExtendedCell) cell); } else { - getCellList(family).add(new CellWrapper(cell)); + throw new IllegalArgumentException("Unsupported cell type: " + cell.getClass().getName()); } return this; } @@ -743,7 +743,7 @@ public CellBuilder getCellBuilder() { * @param cellType e.g Cell.Type.Put * @return CellBuilder which already has relevant Type and Row set. */ - protected CellBuilder getCellBuilder(CellBuilderType cellBuilderType, Cell.Type cellType) { + protected final CellBuilder getCellBuilder(CellBuilderType cellBuilderType, Cell.Type cellType) { CellBuilder builder = CellBuilderFactory.create(cellBuilderType).setRow(row).setType(cellType); return new CellBuilder() { @Override @@ -818,158 +818,4 @@ public CellBuilder clear() { } }; } - - private static final class CellWrapper implements ExtendedCell { - private static final long FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT // object header - + KeyValue.TIMESTAMP_SIZE // timestamp - + Bytes.SIZEOF_LONG // sequence id - + 1 * ClassSize.REFERENCE); // references to cell - private final Cell cell; - private long sequenceId; - private long timestamp; - - CellWrapper(Cell cell) { - assert !(cell instanceof ExtendedCell); - this.cell = cell; - this.sequenceId = cell.getSequenceId(); - this.timestamp = cell.getTimestamp(); - } - - @Override - public void setSequenceId(long seqId) { - sequenceId = seqId; - } - - @Override - public void setTimestamp(long ts) { - timestamp = ts; - } - - @Override - public void setTimestamp(byte[] ts) { - timestamp = Bytes.toLong(ts); - } - - @Override - public long getSequenceId() { - return sequenceId; - } - - @Override - public byte[] getValueArray() { - return cell.getValueArray(); - } - - @Override - public int getValueOffset() { - return cell.getValueOffset(); - } - - @Override - public int getValueLength() { - return cell.getValueLength(); - } - - @Override - public byte[] getTagsArray() { - return cell.getTagsArray(); - } - - @Override - public int getTagsOffset() { - return cell.getTagsOffset(); - } - - @Override - public int getTagsLength() { - return cell.getTagsLength(); - } - - @Override - public byte[] getRowArray() { - return cell.getRowArray(); - } - - @Override - public int getRowOffset() { - return cell.getRowOffset(); - } - - @Override - public short getRowLength() { - return cell.getRowLength(); - } - - @Override - public byte[] getFamilyArray() { - return cell.getFamilyArray(); - } - - @Override - public int getFamilyOffset() { - return cell.getFamilyOffset(); - } - - @Override - public byte getFamilyLength() { - return cell.getFamilyLength(); - } - - @Override - public byte[] getQualifierArray() { - return cell.getQualifierArray(); - } - - @Override - public int getQualifierOffset() { - return cell.getQualifierOffset(); - } - - @Override - public int getQualifierLength() { - return cell.getQualifierLength(); - } - - @Override - public long getTimestamp() { - return timestamp; - } - - @Override - public byte getTypeByte() { - return cell.getTypeByte(); - } - - @Override - public Optional getTag(byte type) { - return PrivateCellUtil.getTag(cell, type); - } - - @Override - public Iterator getTags() { - return PrivateCellUtil.tagsIterator(cell); - } - - @Override - public byte[] cloneTags() { - return PrivateCellUtil.cloneTags(cell); - } - - private long heapOverhead() { - return FIXED_OVERHEAD + ClassSize.ARRAY // row - + getFamilyLength() 
== 0 - ? 0 - : ClassSize.ARRAY + getQualifierLength() == 0 ? 0 - : ClassSize.ARRAY + getValueLength() == 0 ? 0 - : ClassSize.ARRAY + getTagsLength() == 0 ? 0 - : ClassSize.ARRAY; - } - - @Override - public long heapSize() { - return heapOverhead() + ClassSize.align(getRowLength()) + ClassSize.align(getFamilyLength()) - + ClassSize.align(getQualifierLength()) + ClassSize.align(getValueLength()) - + ClassSize.align(getTagsLength()); - } - } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.java index 56a8dd19fcc5..2f70fd692086 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PackagePrivateFieldAccessor.java @@ -17,6 +17,9 @@ */ package org.apache.hadoop.hbase.client; +import java.util.List; +import java.util.NavigableMap; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.yetus.audience.InterfaceAudience; /** @@ -38,4 +41,12 @@ public static void setMvccReadPoint(Scan scan, long mvccReadPoint) { public static long getMvccReadPoint(Scan scan) { return scan.getMvccReadPoint(); } + + public static ExtendedCell[] getExtendedRawCells(Result result) { + return result.rawExtendedCells(); + } + + public static NavigableMap> getExtendedFamilyCellMap(Mutation m) { + return m.familyMap; + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java index dc470069f90c..b97a023c3899 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilder; import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.HeapSize; @@ -93,11 +94,9 @@ public Put(ByteBuffer row) { */ public Put(byte[] rowArray, int rowOffset, int rowLength, long ts) { checkRow(rowArray, rowOffset, rowLength); + checkTimestamp(ts); this.row = Bytes.copy(rowArray, rowOffset, rowLength); this.ts = ts; - if (ts < 0) { - throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts); - } } /** @@ -119,9 +118,7 @@ public Put(byte[] row, boolean rowIsImmutable) { */ public Put(byte[] row, long ts, boolean rowIsImmutable) { // Check and set timestamp - if (ts < 0) { - throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts); - } + checkTimestamp(ts); this.ts = ts; // Deal with row according to rowIsImmutable @@ -171,10 +168,8 @@ public Put addColumn(byte[] family, byte[] qualifier, byte[] value) { * @param value column value */ public Put addColumn(byte[] family, byte[] qualifier, long ts, byte[] value) { - if (ts < 0) { - throw new IllegalArgumentException("Timestamp cannot be negative. 
ts=" + ts); - } - List list = getCellList(family); + checkTimestamp(ts); + List list = getCellList(family); KeyValue kv = createPutKeyValue(family, qualifier, ts, value); list.add(kv); return this; @@ -189,10 +184,8 @@ public Put addColumn(byte[] family, byte[] qualifier, long ts, byte[] value) { * @param value column value */ public Put addColumn(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value) { - if (ts < 0) { - throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts); - } - List list = getCellList(family); + checkTimestamp(ts); + List list = getCellList(family); KeyValue kv = createPutKeyValue(family, qualifier, ts, value, null); list.add(kv); return this; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java index ec1d3f2b1fab..ba866812b889 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java @@ -32,9 +32,11 @@ import java.util.TreeMap; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScannable; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -70,8 +72,8 @@ * {@link #copyFrom(Result)} */ @InterfaceAudience.Public -public class Result implements CellScannable, CellScanner { - private Cell[] cells; +public class Result implements ExtendedCellScannable, ExtendedCellScanner { + private ExtendedCell[] cells; private Boolean exists; // if the query was just to check existence. private boolean stale = false; @@ -142,7 +144,8 @@ public static Result create(List cells, Boolean exists, boolean if (exists != null) { return new Result(null, exists, stale, mayHaveMoreCellsInRow); } - return new Result(cells.toArray(new Cell[cells.size()]), null, stale, mayHaveMoreCellsInRow); + return new Result(cells.toArray(new ExtendedCell[cells.size()]), null, stale, + mayHaveMoreCellsInRow); } /** @@ -163,7 +166,7 @@ public static Result create(Cell[] cells, Boolean exists, boolean stale, if (exists != null) { return new Result(null, exists, stale, mayHaveMoreCellsInRow); } - return new Result(cells, null, stale, mayHaveMoreCellsInRow); + return new Result((ExtendedCell[]) cells, null, stale, mayHaveMoreCellsInRow); } public static Result createCursorResult(Cursor cursor) { @@ -176,7 +179,8 @@ private Result(Cursor cursor) { } /** Private ctor. Use {@link #create(Cell[])}. */ - private Result(Cell[] cells, Boolean exists, boolean stale, boolean mayHaveMoreCellsInRow) { + private Result(ExtendedCell[] cells, Boolean exists, boolean stale, + boolean mayHaveMoreCellsInRow) { this.cells = cells; this.exists = exists; this.stale = stale; @@ -212,6 +216,10 @@ public Cell[] rawCells() { return cells; } + ExtendedCell[] rawExtendedCells() { + return cells; + } + /** * Create a sorted list of the Cell's in this result. Since HBase 0.20.5 this is equivalent to * raw(). 
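A short hedged sketch of the Result compatibility contract introduced above (values and class name are illustrative): client code keeps reading Cell[] from rawCells(), while the stronger-typed array stays internal.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class ResultDumpSketch {
  // rawCells() still hands client code a Cell[]; the ExtendedCell[] backing it
  // stays internal, reachable only via the package-private rawExtendedCells().
  static void dump(Result result) {
    for (Cell cell : result.rawCells()) {
      System.out.println(Bytes.toStringBinary(CellUtil.cloneRow(cell)) + "/"
        + Bytes.toStringBinary(CellUtil.cloneQualifier(cell)) + "="
        + Bytes.toStringBinary(CellUtil.cloneValue(cell)));
    }
  }
}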
@@ -263,7 +271,7 @@ private byte[] notNullBytes(final byte[] bytes) { } } - protected int binarySearch(final Cell[] kvs, final byte[] family, final byte[] qualifier) { + private int binarySearch(final Cell[] kvs, final byte[] family, final byte[] qualifier) { byte[] familyNotNull = notNullBytes(family); byte[] qualifierNotNull = notNullBytes(qualifier); Cell searchTerm = PrivateCellUtil.createFirstOnRow(kvs[0].getRowArray(), kvs[0].getRowOffset(), @@ -294,7 +302,7 @@ protected int binarySearch(final Cell[] kvs, final byte[] family, final byte[] q * @param qlength qualifier length * @return the index where the value was found, or -1 otherwise */ - protected int binarySearch(final Cell[] kvs, final byte[] family, final int foffset, + private int binarySearch(final Cell[] kvs, final byte[] family, final int foffset, final int flength, final byte[] qualifier, final int qoffset, final int qlength) { double keyValueSize = @@ -734,12 +742,12 @@ public static void compareResults(Result res1, Result res2, boolean verbose) thr + ", " + res1.size() + " cells are compared to " + res2.size() + " cells"); } } - Cell[] ourKVs = res1.rawCells(); - Cell[] replicatedKVs = res2.rawCells(); + ExtendedCell[] ourKVs = res1.cells; + ExtendedCell[] replicatedKVs = res2.cells; for (int i = 0; i < res1.size(); i++) { if ( !ourKVs[i].equals(replicatedKVs[i]) || !CellUtil.matchingValue(ourKVs[i], replicatedKVs[i]) - || !CellUtil.matchingTags(ourKVs[i], replicatedKVs[i]) + || !PrivateCellUtil.matchingTags(ourKVs[i], replicatedKVs[i]) ) { if (verbose) { throw new Exception("This result was different: " + res1 + " compared to " + res2); @@ -824,18 +832,29 @@ public void copyFrom(Result other) { this.cells = other.cells; } + /** + * For client users: You should only use the return value as a + * {@link org.apache.hadoop.hbase.CellScanner}, {@link ExtendedCellScanner} is marked as + * IA.Private which means there is no guarantee about its API stability. + */ @Override - public CellScanner cellScanner() { + public ExtendedCellScanner cellScanner() { // Reset this.cellScannerIndex = INITIAL_CELLSCANNER_INDEX; return this; } + /** + * For client users: You should only use the return value as a {@link Cell}, {@link ExtendedCell} + * is marked as IA.Private which means there is no guarantee about its API stability. + */ @Override - public Cell current() { + public ExtendedCell current() { if ( isEmpty() || cellScannerIndex == INITIAL_CELLSCANNER_INDEX || cellScannerIndex >= cells.length - ) return null; + ) { + return null; + } return this.cells[cellScannerIndex]; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java index 980e708d235c..45a9b29daba2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java @@ -22,7 +22,7 @@ import java.util.Map; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.client.MetricsConnection; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.yetus.audience.InterfaceAudience; @@ -44,7 +44,7 @@ class Call { * Optionally has cells when making call. Optionally has cells set on response. Used passing cells * to the rpc and receiving the response. 
*/ - CellScanner cells; + ExtendedCellScanner cells; @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "IS2_INCONSISTENT_SYNC", justification = "Direct access is only allowed after done") Message response; // value, null if error @@ -63,9 +63,10 @@ class Call { final Span span; Timeout timeoutTask; - Call(int id, final Descriptors.MethodDescriptor md, Message param, final CellScanner cells, - final Message responseDefaultType, int timeout, int priority, Map attributes, - RpcCallback callback, MetricsConnection.CallStats callStats) { + Call(int id, final Descriptors.MethodDescriptor md, Message param, + final ExtendedCellScanner cells, final Message responseDefaultType, int timeout, int priority, + Map attributes, RpcCallback callback, + MetricsConnection.CallStats callStats) { this.param = param; this.md = md; this.cells = cells; @@ -136,7 +137,7 @@ public void setException(IOException error) { * @param response return value of the call. * @param cells Can be null */ - public void setResponse(Message response, final CellScanner cells) { + public void setResponse(Message response, final ExtendedCellScanner cells) { synchronized (this) { if (done) { return; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java index e7364ca3b429..a52aa8693f68 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java @@ -26,8 +26,8 @@ import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.io.ByteBuffInputStream; @@ -110,7 +110,7 @@ public int size() { * been flipped and is ready for reading. Use limit to find total size. 
*/ public ByteBuffer buildCellBlock(final Codec codec, final CompressionCodec compressor, - final CellScanner cellScanner) throws IOException { + final ExtendedCellScanner cellScanner) throws IOException { ByteBufferOutputStreamSupplier supplier = new ByteBufferOutputStreamSupplier(); if (buildCellBlock(codec, compressor, cellScanner, supplier)) { ByteBuffer bb = supplier.baos.getByteBuffer(); @@ -144,8 +144,8 @@ public int size() { } } - public ByteBuf buildCellBlock(Codec codec, CompressionCodec compressor, CellScanner cellScanner, - ByteBufAllocator alloc) throws IOException { + public ByteBuf buildCellBlock(Codec codec, CompressionCodec compressor, + ExtendedCellScanner cellScanner, ByteBufAllocator alloc) throws IOException { ByteBufOutputStreamSupplier supplier = new ByteBufOutputStreamSupplier(alloc); if (buildCellBlock(codec, compressor, cellScanner, supplier)) { return supplier.buf; @@ -155,7 +155,7 @@ public ByteBuf buildCellBlock(Codec codec, CompressionCodec compressor, CellScan } private boolean buildCellBlock(final Codec codec, final CompressionCodec compressor, - final CellScanner cellScanner, OutputStreamSupplier supplier) throws IOException { + final ExtendedCellScanner cellScanner, OutputStreamSupplier supplier) throws IOException { if (cellScanner == null) { return false; } @@ -171,7 +171,7 @@ private boolean buildCellBlock(final Codec codec, final CompressionCodec compres return true; } - private void encodeCellsTo(OutputStream os, CellScanner cellScanner, Codec codec, + private void encodeCellsTo(OutputStream os, ExtendedCellScanner cellScanner, Codec codec, CompressionCodec compressor) throws IOException { Compressor poolCompressor = null; try { @@ -212,7 +212,7 @@ private void encodeCellsTo(OutputStream os, CellScanner cellScanner, Codec codec * @throws IOException if encoding the cells fail */ public ByteBufferListOutputStream buildCellBlockStream(Codec codec, CompressionCodec compressor, - CellScanner cellScanner, ByteBuffAllocator allocator) throws IOException { + ExtendedCellScanner cellScanner, ByteBuffAllocator allocator) throws IOException { if (cellScanner == null) { return null; } @@ -235,7 +235,7 @@ public ByteBufferListOutputStream buildCellBlockStream(Codec codec, CompressionC * @return CellScanner to work against the content of cellBlock * @throws IOException if encoding fails */ - public CellScanner createCellScanner(final Codec codec, final CompressionCodec compressor, + public ExtendedCellScanner createCellScanner(final Codec codec, final CompressionCodec compressor, final byte[] cellBlock) throws IOException { // Use this method from Client side to create the CellScanner if (compressor != null) { @@ -258,7 +258,7 @@ public CellScanner createCellScanner(final Codec codec, final CompressionCodec c * out of the CellScanner will share the same ByteBuffer being passed. 
* @throws IOException if cell encoding fails */ - public CellScanner createCellScannerReusingBuffers(final Codec codec, + public ExtendedCellScanner createCellScannerReusingBuffers(final Codec codec, final CompressionCodec compressor, ByteBuff cellBlock) throws IOException { // Use this method from HRS to create the CellScanner // If compressed, decompress it first before passing it on else we will leak compression diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/DelegatingHBaseRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/DelegatingHBaseRpcController.java index 2b8839bf8462..5b220a24ec56 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/DelegatingHBaseRpcController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/DelegatingHBaseRpcController.java @@ -19,7 +19,7 @@ import java.io.IOException; import java.util.Map; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.TableName; import org.apache.yetus.audience.InterfaceAudience; @@ -74,12 +74,12 @@ public void notifyOnCancel(RpcCallback callback) { } @Override - public CellScanner cellScanner() { + public ExtendedCellScanner cellScanner() { return delegate.cellScanner(); } @Override - public void setCellScanner(CellScanner cellScanner) { + public void setCellScanner(ExtendedCellScanner cellScanner) { delegate.setCellScanner(cellScanner); } @@ -134,7 +134,7 @@ public IOException getFailed() { } @Override - public void setDone(CellScanner cellScanner) { + public void setDone(ExtendedCellScanner cellScanner) { delegate.setDone(cellScanner); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcController.java index 4d3e038bb5ec..8fe44ca59cfc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcController.java @@ -19,8 +19,8 @@ import java.io.IOException; import java.util.Map; -import org.apache.hadoop.hbase.CellScannable; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScannable; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -42,13 +42,13 @@ @InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX, HBaseInterfaceAudience.REPLICATION }) @InterfaceStability.Evolving -public interface HBaseRpcController extends RpcController, CellScannable { +public interface HBaseRpcController extends RpcController, ExtendedCellScannable { /** * Only used to send cells to rpc server, the returned cells should be set by - * {@link #setDone(CellScanner)}. + * {@link #setDone(ExtendedCellScanner)}. */ - void setCellScanner(CellScanner cellScanner); + void setCellScanner(ExtendedCellScanner cellScanner); /** * Set the priority for this operation. @@ -97,7 +97,7 @@ public interface HBaseRpcController extends RpcController, CellScannable { * IMPORTANT: always call this method if the call finished without any exception to tell * the {@code HBaseRpcController} that we are done. 
*/ - void setDone(CellScanner cellScanner); + void setDone(ExtendedCellScanner cellScanner); /** * A little different from the basic RpcController: diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcControllerImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcControllerImpl.java index 54e9310b5ae7..0667ce2ee627 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcControllerImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcControllerImpl.java @@ -22,10 +22,10 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import org.apache.hadoop.hbase.CellScannable; -import org.apache.hadoop.hbase.CellScanner; -import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCellScannable; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.yetus.audience.InterfaceAudience; @@ -72,28 +72,29 @@ public class HBaseRpcControllerImpl implements HBaseRpcController { * sometimes the scanner is backed by a List of Cells and other times, it is backed by an encoded * block that implements CellScanner. */ - private CellScanner cellScanner; + private ExtendedCellScanner cellScanner; private Map requestAttributes = Collections.emptyMap(); public HBaseRpcControllerImpl() { - this(null, (CellScanner) null); + this(null, (ExtendedCellScanner) null); } /** * Used server-side. Clients should go via {@link RpcControllerFactory} */ - public HBaseRpcControllerImpl(final CellScanner cellScanner) { + public HBaseRpcControllerImpl(final ExtendedCellScanner cellScanner) { this(null, cellScanner); } - HBaseRpcControllerImpl(RegionInfo regionInfo, final CellScanner cellScanner) { + HBaseRpcControllerImpl(RegionInfo regionInfo, final ExtendedCellScanner cellScanner) { this.cellScanner = cellScanner; this.regionInfo = regionInfo; } - HBaseRpcControllerImpl(RegionInfo regionInfo, final List cellIterables) { - this.cellScanner = cellIterables == null ? null : CellUtil.createCellScanner(cellIterables); + HBaseRpcControllerImpl(RegionInfo regionInfo, final List cellIterables) { + this.cellScanner = + cellIterables == null ? 
null : PrivateCellUtil.createExtendedCellScanner(cellIterables); this.regionInfo = null; } @@ -109,14 +110,14 @@ public RegionInfo getRegionInfo() { /** Returns One-shot cell scanner (you cannot back it up and restart) */ @Override - public CellScanner cellScanner() { + public ExtendedCellScanner cellScanner() { return cellScanner; } @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "IS2_INCONSISTENT_SYNC", justification = "The only possible race method is startCancel") @Override - public void setCellScanner(final CellScanner cellScanner) { + public void setCellScanner(final ExtendedCellScanner cellScanner) { this.cellScanner = cellScanner; } @@ -240,7 +241,7 @@ public synchronized IOException getFailed() { } @Override - public synchronized void setDone(CellScanner cellScanner) { + public synchronized void setDone(ExtendedCellScanner cellScanner) { if (done) { return; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java index 8017e99ec4ff..dbdb0e2037f8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcConnection.java @@ -34,7 +34,7 @@ import java.util.function.Consumer; import javax.security.sasl.SaslException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.MetricsConnection; import org.apache.hadoop.hbase.codec.Codec; @@ -382,7 +382,7 @@ private void finishCall(ResponseHeader respo } else { value = null; } - CellScanner cellBlockScanner; + ExtendedCellScanner cellBlockScanner; if (responseHeader.hasCellBlockMeta()) { int size = responseHeader.getCellBlockMeta().getLength(); // Maybe we could read directly from the ByteBuf. 
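To make the cell-block encode/decode path above concrete, a minimal round-trip sketch under stated assumptions (no compressor, hypothetical cell values, illustrative class name). It mirrors what TestCellBlockBuilder exercises and uses the IA.Private CellBlockBuilder, so it is internal-facing rather than public API.

import java.nio.ByteBuffer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.ExtendedCellScanner;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.ipc.CellBlockBuilder;
import org.apache.hadoop.hbase.nio.SingleByteBuff;
import org.apache.hadoop.hbase.util.Bytes;

public class CellBlockRoundTripSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    CellBlockBuilder builder = new CellBlockBuilder(conf);
    ExtendedCell[] cells = { new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
      Bytes.toBytes("q"), 1L, Bytes.toBytes("v")) };
    // Encode the cells into a cell block, then decode them back through the
    // server-side entry point.
    ByteBuffer block = builder.buildCellBlock(new KeyValueCodec(), null,
      PrivateCellUtil.createExtendedCellScanner(cells));
    ExtendedCellScanner scanner = builder.createCellScannerReusingBuffers(
      new KeyValueCodec(), null, new SingleByteBuff(block));
    while (scanner.advance()) {
      System.out.println(scanner.current());
    }
  }
}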
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java index a256769de703..1977f09abb11 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcControllerFactory.java @@ -19,8 +19,8 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellScannable; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScannable; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.util.ReflectionUtils; @@ -54,20 +54,20 @@ public HBaseRpcController newController() { return new HBaseRpcControllerImpl(); } - public HBaseRpcController newController(CellScanner cellScanner) { + public HBaseRpcController newController(ExtendedCellScanner cellScanner) { return new HBaseRpcControllerImpl(null, cellScanner); } - public HBaseRpcController newController(RegionInfo regionInfo, CellScanner cellScanner) { + public HBaseRpcController newController(RegionInfo regionInfo, ExtendedCellScanner cellScanner) { return new HBaseRpcControllerImpl(regionInfo, cellScanner); } - public HBaseRpcController newController(final List cellIterables) { + public HBaseRpcController newController(final List cellIterables) { return new HBaseRpcControllerImpl(null, cellIterables); } public HBaseRpcController newController(RegionInfo regionInfo, - final List cellIterables) { + final List cellIterables) { return new HBaseRpcControllerImpl(regionInfo, cellIterables); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index d48b8b73a316..d3672c5e841b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -245,7 +246,7 @@ private ProtobufUtil() { * Many results are simple: no cell, exists true or false. To save on object creations, we reuse * them across calls. 
*/ - private final static Cell[] EMPTY_CELL_ARRAY = new Cell[] {}; + private final static ExtendedCell[] EMPTY_CELL_ARRAY = new ExtendedCell[0]; private final static Result EMPTY_RESULT = Result.create(EMPTY_CELL_ARRAY); final static Result EMPTY_RESULT_EXISTS_TRUE = Result.create(null, true); final static Result EMPTY_RESULT_EXISTS_FALSE = Result.create(null, false); @@ -2024,7 +2025,7 @@ private static ByteString wrap(ByteBuffer b, int offset, int length) { return UnsafeByteOperations.unsafeWrap(dup); } - public static Cell toCell(ExtendedCellBuilder cellBuilder, final CellProtos.Cell cell, + public static ExtendedCell toCell(ExtendedCellBuilder cellBuilder, final CellProtos.Cell cell, boolean decodeTags) { ExtendedCellBuilder builder = cellBuilder.clear().setRow(cell.getRow().toByteArray()) .setFamily(cell.getFamily().toByteArray()).setQualifier(cell.getQualifier().toByteArray()) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java index ce12aaea0d24..b98094ad92a6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java @@ -27,11 +27,11 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.ClusterMetrics.Option; import org.apache.hadoop.hbase.ClusterMetricsBuilder; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCellScannable; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.ServerName; @@ -469,7 +469,7 @@ public static BulkLoadHFileRequest buildBulkLoadHFileRequest( * RowMutations/CheckAndMutate within the original list of actions */ public static void buildNoDataRegionActions(final byte[] regionName, - final Iterable actions, final List cells, + final Iterable actions, final List cells, final MultiRequest.Builder multiRequestBuilder, final RegionAction.Builder regionActionBuilder, final ClientProtos.Action.Builder actionBuilder, final MutationProto.Builder mutationBuilder, long nonceGroup, final Map indexMap) throws IOException { @@ -609,17 +609,19 @@ public static void buildNoDataRegionActions(final byte[] regionName, } } - private static void buildNoDataRegionAction(final Put put, final List cells, - final RegionAction.Builder regionActionBuilder, final ClientProtos.Action.Builder actionBuilder, - final MutationProto.Builder mutationBuilder) throws IOException { + private static void buildNoDataRegionAction(final Put put, + final List cells, final RegionAction.Builder regionActionBuilder, + final ClientProtos.Action.Builder actionBuilder, final MutationProto.Builder mutationBuilder) + throws IOException { cells.add(put); regionActionBuilder.addAction(actionBuilder .setMutation(ProtobufUtil.toMutationNoData(MutationType.PUT, put, mutationBuilder))); } - private static void buildNoDataRegionAction(final Delete delete, final List cells, - final RegionAction.Builder regionActionBuilder, final ClientProtos.Action.Builder actionBuilder, - final MutationProto.Builder mutationBuilder) throws IOException { + private static void buildNoDataRegionAction(final Delete delete, + final List cells, final 
RegionAction.Builder regionActionBuilder, + final ClientProtos.Action.Builder actionBuilder, final MutationProto.Builder mutationBuilder) + throws IOException { int size = delete.size(); // Note that a legitimate Delete may have a size of zero; i.e. a Delete that has nothing // in it but the row to delete. In this case, the current implementation does not make @@ -637,18 +639,18 @@ private static void buildNoDataRegionAction(final Delete delete, final List cells, long nonce, final RegionAction.Builder regionActionBuilder, - final ClientProtos.Action.Builder actionBuilder, final MutationProto.Builder mutationBuilder) - throws IOException { + final List cells, long nonce, + final RegionAction.Builder regionActionBuilder, final ClientProtos.Action.Builder actionBuilder, + final MutationProto.Builder mutationBuilder) throws IOException { cells.add(increment); regionActionBuilder.addAction(actionBuilder.setMutation( ProtobufUtil.toMutationNoData(MutationType.INCREMENT, increment, mutationBuilder, nonce))); } - private static void buildNoDataRegionAction(final Append append, final List cells, - long nonce, final RegionAction.Builder regionActionBuilder, - final ClientProtos.Action.Builder actionBuilder, final MutationProto.Builder mutationBuilder) - throws IOException { + private static void buildNoDataRegionAction(final Append append, + final List cells, long nonce, + final RegionAction.Builder regionActionBuilder, final ClientProtos.Action.Builder actionBuilder, + final MutationProto.Builder mutationBuilder) throws IOException { cells.add(append); regionActionBuilder.addAction(actionBuilder.setMutation( ProtobufUtil.toMutationNoData(MutationType.APPEND, append, mutationBuilder, nonce))); @@ -656,9 +658,9 @@ private static void buildNoDataRegionAction(final Append append, final List cells, long nonce, final RegionAction.Builder regionActionBuilder, - final ClientProtos.Action.Builder actionBuilder, final MutationProto.Builder mutationBuilder) - throws IOException { + final List cells, long nonce, + final RegionAction.Builder regionActionBuilder, final ClientProtos.Action.Builder actionBuilder, + final MutationProto.Builder mutationBuilder) throws IOException { boolean ret = false; for (Mutation mutation : rowMutations.getMutations()) { mutationBuilder.clear(); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java index 718ca05b92f3..5da59b144a5f 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java @@ -26,7 +26,9 @@ import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -124,13 +126,13 @@ public void testPutCopyConstructor() throws IOException { private void assertEquals(Mutation origin, Mutation clone) { Assert.assertEquals(origin.getFamilyCellMap().size(), clone.getFamilyCellMap().size()); for (byte[] family : origin.getFamilyCellMap().keySet()) { - List originCells = origin.getCellList(family); - List cloneCells = clone.getCellList(family); + List originCells = 
origin.getCellList(family); + List cloneCells = clone.getCellList(family); Assert.assertEquals(originCells.size(), cloneCells.size()); for (int i = 0; i != cloneCells.size(); ++i) { - Cell originCell = originCells.get(i); - Cell cloneCell = cloneCells.get(i); - assertTrue(CellUtil.equals(originCell, cloneCell)); + ExtendedCell originCell = originCells.get(i); + ExtendedCell cloneCell = cloneCells.get(i); + assertTrue(PrivateCellUtil.equals(originCell, cloneCell)); assertTrue(CellUtil.matchingValue(originCell, cloneCell)); } } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java index fccea923635a..c3a145ea6f33 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java @@ -21,18 +21,17 @@ import java.io.IOException; import java.nio.ByteBuffer; -import java.util.Arrays; import org.apache.commons.lang3.time.StopWatch; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellScanner; -import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.codec.KeyValueCodec; -import org.apache.hadoop.hbase.io.SizedCellScanner; +import org.apache.hadoop.hbase.io.SizedExtendedCellScanner; import org.apache.hadoop.hbase.nio.SingleByteBuff; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -78,10 +77,9 @@ static void doBuildCellBlockUndoCellBlock(final CellBlockBuilder builder, final static void doBuildCellBlockUndoCellBlock(final CellBlockBuilder builder, final Codec codec, final CompressionCodec compressor, final int count, final int size, final boolean sized) throws IOException { - Cell[] cells = getCells(count, size); - CellScanner cellScanner = sized - ? getSizedCellScanner(cells) - : CellUtil.createCellScanner(Arrays.asList(cells).iterator()); + ExtendedCell[] cells = getCells(count, size); + ExtendedCellScanner cellScanner = + sized ? 
getSizedCellScanner(cells) : PrivateCellUtil.createExtendedCellScanner(cells); ByteBuffer bb = builder.buildCellBlock(codec, compressor, cellScanner); cellScanner = builder.createCellScannerReusingBuffers(codec, compressor, new SingleByteBuff(bb)); @@ -92,21 +90,21 @@ static void doBuildCellBlockUndoCellBlock(final CellBlockBuilder builder, final assertEquals(count, i); } - static CellScanner getSizedCellScanner(final Cell[] cells) { + static ExtendedCellScanner getSizedCellScanner(final ExtendedCell[] cells) { int size = -1; for (Cell cell : cells) { size += PrivateCellUtil.estimatedSerializedSizeOf(cell); } final int totalSize = ClassSize.align(size); - final CellScanner cellScanner = CellUtil.createCellScanner(cells); - return new SizedCellScanner() { + final ExtendedCellScanner cellScanner = PrivateCellUtil.createExtendedCellScanner(cells); + return new SizedExtendedCellScanner() { @Override public long heapSize() { return totalSize; } @Override - public Cell current() { + public ExtendedCell current() { return cellScanner.current(); } @@ -117,12 +115,12 @@ public boolean advance() throws IOException { }; } - static Cell[] getCells(final int howMany) { + static ExtendedCell[] getCells(final int howMany) { return getCells(howMany, 1024); } - static Cell[] getCells(final int howMany, final int valueSize) { - Cell[] cells = new Cell[howMany]; + static ExtendedCell[] getCells(final int howMany, final int valueSize) { + ExtendedCell[] cells = new ExtendedCell[howMany]; byte[] value = new byte[valueSize]; for (int i = 0; i < howMany; i++) { byte[] index = Bytes.toBytes(i); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java index a0b68646b145..a1ef458b3b48 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java @@ -24,8 +24,10 @@ import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScannable; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -43,7 +45,7 @@ public class TestHBaseRpcControllerImpl { @Test public void testListOfCellScannerables() throws IOException { final int count = 10; - List cells = new ArrayList<>(count); + List cells = new ArrayList<>(count); for (int i = 0; i < count; i++) { cells.add(createCell(i)); @@ -64,16 +66,16 @@ public void testListOfCellScannerables() throws IOException { * @param index the index of the cell to use as its value * @return A faked out 'Cell' that does nothing but return index as its value */ - static CellScannable createCell(final int index) { - return new CellScannable() { + static ExtendedCellScannable createCell(final int index) { + return new ExtendedCellScannable() { @Override - public CellScanner cellScanner() { - return new CellScanner() { + public ExtendedCellScanner cellScanner() { + return new ExtendedCellScanner() { @Override - public Cell current() { + public ExtendedCell current() { // Fake out a Cell. 
All this Cell has is a value that is an int in size and equal // to the above 'index' param serialized as an int. - return new Cell() { + return new ExtendedCell() { @Override public long heapSize() { return 0; @@ -180,6 +182,18 @@ public byte[] getTagsArray() { public Type getType() { return null; } + + @Override + public void setSequenceId(long seqId) throws IOException { + } + + @Override + public void setTimestamp(long ts) throws IOException { + } + + @Override + public void setTimestamp(byte[] ts) throws IOException { + } }; } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java index 2b4380dfbb6d..acc561812853 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparatorImpl; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseClassTestRule; @@ -505,7 +506,7 @@ public void testCellConversionWithTags() { CellProtos.Cell protoCell = ProtobufUtil.toCell(cell, true); assertNotNull(protoCell); - Cell decodedCell = getCellFromProtoResult(protoCell, true); + ExtendedCell decodedCell = getCellFromProtoResult(protoCell, true); List decodedTags = PrivateCellUtil.getTags(decodedCell); assertEquals(1, decodedTags.size()); Tag decodedTag = decodedTags.get(0); @@ -525,7 +526,7 @@ private Cell getCellWithTags() { return cellBuilder.build(); } - private Cell getCellFromProtoResult(CellProtos.Cell protoCell, boolean decodeTags) { + private ExtendedCell getCellFromProtoResult(CellProtos.Cell protoCell, boolean decodeTags) { ExtendedCellBuilder decodedBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY); return ProtobufUtil.toCell(decodedBuilder, protoCell, decodeTags); @@ -542,7 +543,7 @@ public void testCellConversionWithoutTags() { CellProtos.Cell protoCell = ProtobufUtil.toCell(cell, false); assertNotNull(protoCell); - Cell decodedCell = getCellFromProtoResult(protoCell, false); + ExtendedCell decodedCell = getCellFromProtoResult(protoCell, false); List decodedTags = PrivateCellUtil.getTags(decodedCell); assertEquals(0, decodedTags.size()); } @@ -558,7 +559,7 @@ public void testTagEncodeFalseDecodeTrue() { CellProtos.Cell protoCell = ProtobufUtil.toCell(cell, false); assertNotNull(protoCell); - Cell decodedCell = getCellFromProtoResult(protoCell, true); + ExtendedCell decodedCell = getCellFromProtoResult(protoCell, true); List decodedTags = PrivateCellUtil.getTags(decodedCell); assertEquals(0, decodedTags.size()); } @@ -574,7 +575,7 @@ public void testTagEncodeTrueDecodeFalse() { CellProtos.Cell protoCell = ProtobufUtil.toCell(cell, true); assertNotNull(protoCell); - Cell decodedCell = getCellFromProtoResult(protoCell, false); + ExtendedCell decodedCell = getCellFromProtoResult(protoCell, false); List decodedTags = PrivateCellUtil.getTags(decodedCell); assertEquals(0, decodedTags.size()); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java index 749f6b46782c..45e6f2d11d45 100644 --- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java @@ -328,10 +328,10 @@ public ExtendedCell deepClone() { */ @Override public boolean equals(Object other) { - if (!(other instanceof Cell)) { + if (!(other instanceof ExtendedCell)) { return false; } - return CellUtil.equals(this, (Cell) other); + return PrivateCellUtil.equals(this, (ExtendedCell) other); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java index a94c3679f478..0cd0905cc3a6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java @@ -761,7 +761,7 @@ public int compareTimestamps(final long ltimestamp, final long rtimestamp) { } @Override - public Comparator getSimpleComparator() { + public Comparator getSimpleComparator() { return this; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index 85f23550efc7..10213b143632 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -309,10 +309,14 @@ public Cell current() { public boolean advance() throws IOException { while (true) { if (this.cellScanner == null) { - if (!this.iterator.hasNext()) return false; + if (!this.iterator.hasNext()) { + return false; + } this.cellScanner = this.iterator.next().cellScanner(); } - if (this.cellScanner.advance()) return true; + if (this.cellScanner.advance()) { + return true; + } this.cellScanner = null; } } @@ -357,13 +361,17 @@ public static CellScanner createCellScanner(final Cell[] cellArray) { @Override public Cell current() { - if (cells == null) return null; + if (cells == null) { + return null; + } return (index < 0) ? null : this.cells[index]; } @Override public boolean advance() { - if (cells == null) return false; + if (cells == null) { + return false; + } return ++index < this.cells.length; } }; @@ -549,8 +557,13 @@ public static boolean matchingValue(final Cell left, final byte[] buf) { buf.length); } + /** + * @deprecated Since 3.0.0, will be removed in 4.0.0. Tags are now internal only, you should not + * try to check it through the {@link Cell} interface. 
+ */ + @Deprecated public static boolean matchingTags(final Cell left, final Cell right) { - return PrivateCellUtil.matchingTags(left, right, left.getTagsLength(), right.getTagsLength()); + return PrivateCellUtil.matchingTags((ExtendedCell) left, (ExtendedCell) right); } /** @@ -662,7 +675,7 @@ public static String toString(Cell cell, boolean verbose) { public static boolean equals(Cell a, Cell b) { return matchingRows(a, b) && matchingFamily(a, b) && matchingQualifier(a, b) - && matchingTimestamp(a, b) && PrivateCellUtil.matchingType(a, b); + && matchingTimestamp(a, b) && a.getTypeByte() == b.getTypeByte(); } public static boolean matchingTimestamp(Cell a, Cell b) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellScannable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellScannable.java new file mode 100644 index 000000000000..c3772f166221 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellScannable.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import org.apache.yetus.audience.InterfaceAudience; + +/** + * We use this class in HBase internally for getting {@link ExtendedCell} directly without casting. + *
* <p>
+ * In general, all {@link Cell}s in HBase should and must be {@link ExtendedCell}. + *
* <p>
+ * See HBASE-28684 and related issues for more details. + * @see CellScannable + * @see ExtendedCellScanner + * @see ExtendedCell + */ +@InterfaceAudience.Private +public interface ExtendedCellScannable extends CellScannable { + + @Override + ExtendedCellScanner cellScanner(); +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellScanner.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellScanner.java new file mode 100644 index 000000000000..f7011291d9ce --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellScanner.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import org.apache.yetus.audience.InterfaceAudience; + +/** + * We use this class in HBase internally for getting {@link ExtendedCell} directly without casting. + *
* <p>
+ * In general, all {@link Cell}s in HBase should and must be {@link ExtendedCell}. + *
* <p>
+ * See HBASE-28684 and related issues for more details. + * @see CellScanner + * @see ExtendedCellScannable + * @see ExtendedCell + */ +@InterfaceAudience.Private +public interface ExtendedCellScanner extends CellScanner { + + @Override + ExtendedCell current(); +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 89e91ca80361..a87a5214fadf 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -978,10 +978,10 @@ private static byte[] createByteArray(final byte[] row, final int roffset, final */ @Override public boolean equals(Object other) { - if (!(other instanceof Cell)) { + if (!(other instanceof ExtendedCell)) { return false; } - return CellUtil.equals(this, (Cell) other); + return PrivateCellUtil.equals(this, (ExtendedCell) other); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java index ed3687e9ed4d..323cae954f06 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java @@ -63,9 +63,12 @@ public static boolean containsIgnoreMvccVersion(Collection kvCol for (Cell kv1 : kvCollection1) { boolean found = false; for (Cell kv2 : kvCollection2) { - if (PrivateCellUtil.equalsIgnoreMvccVersion(kv1, kv2)) found = true; + if (PrivateCellUtil.equalsIgnoreMvccVersion((ExtendedCell) kv1, (ExtendedCell) kv2)) + found = true; + } + if (!found) { + return false; } - if (!found) return false; } return true; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java index 4b61688abc28..6c8f2e6e4edb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java @@ -88,7 +88,7 @@ public static int totalLengthWithMvccVersion(final Iterable /**************** copy the cell to create a new keyvalue *********************/ - public static KeyValue copyToNewKeyValue(final Cell cell) { + public static KeyValue copyToNewKeyValue(final ExtendedCell cell) { byte[] bytes = copyToNewByteArray(cell); KeyValue kvCell = new KeyValue(bytes, 0, bytes.length); kvCell.setSequenceId(cell.getSequenceId()); @@ -120,7 +120,7 @@ public static KeyValue toNewKeyCell(final ExtendedCell cell) { return kv; } - public static byte[] copyToNewByteArray(final Cell cell) { + public static byte[] copyToNewByteArray(final ExtendedCell cell) { // Cell#getSerializedSize returns the serialized size of the Source cell, which may // not serialize all fields. We are constructing a KeyValue backing array here, // which does include all fields, and must allocate accordingly. 
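With copyToNewKeyValue and copyToNewByteArray narrowed to ExtendedCell, any caller still holding a plain Cell has to narrow it first. A minimal sketch, assuming the ensureExtendedCell helper this patch adds to PrivateCellUtil; the CopyCellSketch class and its copy method are illustrative only, not part of the patch:

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.ExtendedCell;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.KeyValueUtil;
    import org.apache.hadoop.hbase.PrivateCellUtil;

    public final class CopyCellSketch {
      // Narrow the Cell instead of casting blindly: ensureExtendedCell rejects
      // foreign Cell implementations with an IllegalArgumentException.
      static KeyValue copy(Cell cell) {
        ExtendedCell ec = PrivateCellUtil.ensureExtendedCell(cell);
        return KeyValueUtil.copyToNewKeyValue(ec);
      }
    }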
@@ -133,7 +133,7 @@ public static byte[] copyToNewByteArray(final Cell cell) { return backingBytes; } - public static int appendKeyTo(final Cell cell, final byte[] output, final int offset) { + public static int appendKeyTo(final ExtendedCell cell, final byte[] output, final int offset) { int nextOffset = offset; nextOffset = Bytes.putShort(output, nextOffset, cell.getRowLength()); nextOffset = CellUtil.copyRowTo(cell, output, nextOffset); @@ -147,7 +147,8 @@ public static int appendKeyTo(final Cell cell, final byte[] output, final int of /**************** copy key and value *********************/ - public static int appendToByteArray(Cell cell, byte[] output, int offset, boolean withTags) { + public static int appendToByteArray(ExtendedCell cell, byte[] output, int offset, + boolean withTags) { int pos = offset; pos = Bytes.putInt(output, pos, keyLength(cell)); pos = Bytes.putInt(output, pos, cell.getValueLength()); @@ -416,7 +417,7 @@ public static KeyValue createFirstOnRow(byte[] buffer, final int boffset, final * @deprecated without any replacement. */ @Deprecated - public static KeyValue ensureKeyValue(final Cell cell) { + public static KeyValue ensureKeyValue(final ExtendedCell cell) { if (cell == null) return null; if (cell instanceof KeyValue) { if (cell.getClass().getName().equals(KeyValue.class.getName())) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java index 9013f9a9f26c..801e1d423480 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java @@ -29,6 +29,8 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.Map.Entry; +import java.util.NavigableMap; import java.util.Optional; import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.io.TagCompressionContext; @@ -40,6 +42,8 @@ import org.apache.hadoop.hbase.util.ClassSize; import org.apache.yetus.audience.InterfaceAudience; +import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; + /** * Utility methods helpful slinging {@link Cell} instances. It has more powerful and rich set of * APIs than those in {@link CellUtil} for internal usage. @@ -72,7 +76,7 @@ public static ByteRange fillValueRange(Cell cell, ByteRange range) { return range.set(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()); } - public static ByteRange fillTagRange(Cell cell, ByteRange range) { + public static ByteRange fillTagRange(ExtendedCell cell, ByteRange range) { return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); } @@ -582,8 +586,8 @@ public int write(OutputStream out, boolean withTags) throws IOException { * Made into a static method so as to reuse the logic within * ValueAndTagRewriteByteBufferExtendedCell */ - static int write(OutputStream out, boolean withTags, Cell cell, byte[] value, byte[] tags) - throws IOException { + static int write(OutputStream out, boolean withTags, ExtendedCell cell, byte[] value, + byte[] tags) throws IOException { int valLen = value == null ? 
0 : value.length; ByteBufferUtils.putInt(out, KeyValueUtil.keyLength(cell));// Key length ByteBufferUtils.putInt(out, valLen);// Value length @@ -802,11 +806,16 @@ public static boolean matchingValue(final Cell left, final Cell right, int lvlen right.getValueArray(), right.getValueOffset(), rvlength); } - public static boolean matchingType(Cell a, Cell b) { + public static boolean matchingType(ExtendedCell a, ExtendedCell b) { return a.getTypeByte() == b.getTypeByte(); } - public static boolean matchingTags(final Cell left, final Cell right, int llength, int rlength) { + public static boolean matchingTags(final ExtendedCell left, final ExtendedCell right) { + return matchingTags(left, right, left.getTagsLength(), right.getTagsLength()); + } + + public static boolean matchingTags(final ExtendedCell left, final ExtendedCell right, int llength, + int rlength) { if (left instanceof ByteBufferExtendedCell && right instanceof ByteBufferExtendedCell) { ByteBufferExtendedCell leftBBCell = (ByteBufferExtendedCell) left; ByteBufferExtendedCell rightBBCell = (ByteBufferExtendedCell) right; @@ -840,7 +849,7 @@ public static boolean isDeleteType(ExtendedCell cell) { return cell.getTypeByte() == KeyValue.Type.Delete.getCode(); } - public static boolean isDeleteFamily(final Cell cell) { + public static boolean isDeleteFamily(final ExtendedCell cell) { return cell.getTypeByte() == KeyValue.Type.DeleteFamily.getCode(); } @@ -862,14 +871,14 @@ public static boolean isDeleteColumnOrFamily(ExtendedCell cell) { return t == KeyValue.Type.DeleteColumn.getCode() || t == KeyValue.Type.DeleteFamily.getCode(); } - public static byte[] cloneTags(Cell cell) { + public static byte[] cloneTags(ExtendedCell cell) { byte[] output = new byte[cell.getTagsLength()]; copyTagsTo(cell, output, 0); return output; } /** Copies the tags info into the tag portion of the cell */ - public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffset) { + public static int copyTagsTo(ExtendedCell cell, byte[] destination, int destinationOffset) { int tlen = cell.getTagsLength(); if (cell instanceof ByteBufferExtendedCell) { ByteBufferUtils.copyFromBufferToArray(destination, @@ -883,7 +892,7 @@ public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffse } /** Copies the tags info into the tag portion of the cell */ - public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationOffset) { + public static int copyTagsTo(ExtendedCell cell, ByteBuffer destination, int destinationOffset) { int tlen = cell.getTagsLength(); if (cell instanceof ByteBufferExtendedCell) { ByteBufferUtils.copyFromBufferToBuffer(((ByteBufferExtendedCell) cell).getTagsByteBuffer(), @@ -900,7 +909,7 @@ public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationO * @param cell The Cell * @return Tags in the given Cell as a List */ - public static List getTags(Cell cell) { + public static List getTags(ExtendedCell cell) { List tags = new ArrayList<>(); Iterator tagsItr = tagsIterator(cell); while (tagsItr.hasNext()) { @@ -915,7 +924,7 @@ public static List getTags(Cell cell) { * @param type Type of the Tag to retrieve * @return Optional, empty if there is no tag of the passed in tag type */ - public static Optional getTag(Cell cell, byte type) { + public static Optional getTag(ExtendedCell cell, byte type) { boolean bufferBacked = cell instanceof ByteBufferExtendedCell; int length = cell.getTagsLength(); int offset = @@ -946,7 +955,7 @@ public static Optional getTag(Cell cell, byte type) { * @param 
cell The Cell over which tags iterator is needed. * @return iterator for the tags */ - public static Iterator tagsIterator(final Cell cell) { + public static Iterator tagsIterator(final ExtendedCell cell) { final int tagsLength = cell.getTagsLength(); // Save an object allocation where we can if (tagsLength == 0) { @@ -1069,8 +1078,8 @@ public static void writeRowSkippingBytes(DataOutputStream out, Cell cell, short * @param withTsType when true check timestamp and type bytes also. * @return length of common prefix */ - public static int findCommonPrefixInFlatKey(Cell c1, Cell c2, boolean bypassFamilyCheck, - boolean withTsType) { + public static int findCommonPrefixInFlatKey(ExtendedCell c1, ExtendedCell c2, + boolean bypassFamilyCheck, boolean withTsType) { // Compare the 2 bytes in RK length part short rLen1 = c1.getRowLength(); short rLen2 = c2.getRowLength(); @@ -2196,7 +2205,7 @@ public Type getType() { * rk len><rk><1 byte cf len><cf><qualifier><8 bytes * timestamp><1 byte type> */ - public static void writeFlatKey(Cell cell, DataOutput out) throws IOException { + public static void writeFlatKey(ExtendedCell cell, DataOutput out) throws IOException { short rowLen = cell.getRowLength(); byte fLen = cell.getFamilyLength(); int qLen = cell.getQualifierLength(); @@ -2223,18 +2232,6 @@ public static void writeFlatKey(Cell cell, DataOutput out) throws IOException { out.writeByte(cell.getTypeByte()); } - /** - * Deep clones the given cell if the cell supports deep cloning - * @param cell the cell to be cloned - * @return the cloned cell - */ - public static Cell deepClone(Cell cell) throws CloneNotSupportedException { - if (cell instanceof ExtendedCell) { - return ((ExtendedCell) cell).deepClone(); - } - throw new CloneNotSupportedException(); - } - /** * Writes the cell to the given OutputStream * @param cell the cell to be written @@ -2242,7 +2239,8 @@ public static Cell deepClone(Cell cell) throws CloneNotSupportedException { * @param withTags if tags are to be written or not * @return the total bytes written */ - public static int writeCell(Cell cell, OutputStream out, boolean withTags) throws IOException { + public static int writeCell(ExtendedCell cell, OutputStream out, boolean withTags) + throws IOException { if (cell instanceof ExtendedCell) { return ((ExtendedCell) cell).write(out, withTags); } else { @@ -2268,23 +2266,7 @@ public static int writeCell(Cell cell, OutputStream out, boolean withTags) throw } } - /** - * Writes a cell to the buffer at the given offset - * @param cell the cell to be written - * @param buf the buffer to which the cell has to be wrriten - * @param offset the offset at which the cell should be written - */ - public static void writeCellToBuffer(Cell cell, ByteBuffer buf, int offset) { - if (cell instanceof ExtendedCell) { - ((ExtendedCell) cell).write(buf, offset); - } else { - // Using the KVUtil - byte[] bytes = KeyValueUtil.copyToNewByteArray(cell); - ByteBufferUtils.copyFromArrayToBuffer(buf, offset, bytes, 0, bytes.length); - } - } - - public static int writeFlatKey(Cell cell, OutputStream out) throws IOException { + public static int writeFlatKey(ExtendedCell cell, OutputStream out) throws IOException { short rowLen = cell.getRowLength(); byte fLen = cell.getFamilyLength(); int qLen = cell.getQualifierLength(); @@ -2359,9 +2341,9 @@ public static void setTimestamp(Cell cell, byte[] ts) throws IOException { * @return True if cell timestamp is modified. 
* @throws IOException when the passed cell is not of type {@link ExtendedCell} */ - public static boolean updateLatestStamp(Cell cell, long ts) throws IOException { + public static boolean updateLatestStamp(ExtendedCell cell, long ts) throws IOException { if (cell.getTimestamp() == HConstants.LATEST_TIMESTAMP) { - setTimestamp(cell, ts); + cell.setTimestamp(ts); return true; } return false; @@ -2452,7 +2434,8 @@ public static void writeQualifierSkippingBytes(DataOutputStream out, Cell cell, * @param cell The cell whose contents has to be written * @param vlength the value length */ - public static void writeValue(OutputStream out, Cell cell, int vlength) throws IOException { + public static void writeValue(OutputStream out, ExtendedCell cell, int vlength) + throws IOException { if (cell instanceof ByteBufferExtendedCell) { ByteBufferUtils.copyBufferToStream(out, ((ByteBufferExtendedCell) cell).getValueByteBuffer(), ((ByteBufferExtendedCell) cell).getValuePosition(), vlength); @@ -2467,7 +2450,8 @@ public static void writeValue(OutputStream out, Cell cell, int vlength) throws I * @param cell The cell whose contents has to be written * @param tagsLength the tag length */ - public static void writeTags(OutputStream out, Cell cell, int tagsLength) throws IOException { + public static void writeTags(OutputStream out, ExtendedCell cell, int tagsLength) + throws IOException { if (cell instanceof ByteBufferExtendedCell) { ByteBufferUtils.copyBufferToStream(out, ((ByteBufferExtendedCell) cell).getTagsByteBuffer(), ((ByteBufferExtendedCell) cell).getTagsPosition(), tagsLength); @@ -2479,22 +2463,31 @@ public static void writeTags(OutputStream out, Cell cell, int tagsLength) throws /** * special case for Cell.equals */ - public static boolean equalsIgnoreMvccVersion(Cell a, Cell b) { + public static boolean equalsIgnoreMvccVersion(ExtendedCell a, ExtendedCell b) { // row boolean res = CellUtil.matchingRows(a, b); - if (!res) return res; + if (!res) { + return res; + } // family res = CellUtil.matchingColumn(a, b); - if (!res) return res; + if (!res) { + return res; + } // timestamp: later sorts first - if (!CellUtil.matchingTimestamp(a, b)) return false; + if (!CellUtil.matchingTimestamp(a, b)) { + return false; + } // type int c = (0xff & b.getTypeByte()) - (0xff & a.getTypeByte()); - if (c != 0) return false; - else return true; + if (c != 0) { + return false; + } else { + return true; + } } /** @@ -2619,8 +2612,8 @@ public static void compressQualifier(OutputStream out, Cell cell, Dictionary dic * @return an int greater than 0 if left is greater than right lesser than 0 if left is lesser * than right equal to 0 if left is equal to right */ - public static final int compare(CellComparator comparator, Cell left, byte[] key, int offset, - int length) { + public static final int compare(CellComparator comparator, ExtendedCell left, byte[] key, + int offset, int length) { // row short rrowlength = Bytes.toShort(key, offset); int c = comparator.compareRows(left, key, offset + Bytes.SIZEOF_SHORT, rrowlength); @@ -2646,7 +2639,7 @@ public static final int compare(CellComparator comparator, Cell left, byte[] key * @return greater than 0 if left cell is bigger, less than 0 if right cell is bigger, 0 if both * cells are equal */ - static final int compareWithoutRow(CellComparator comparator, Cell left, byte[] right, + static final int compareWithoutRow(CellComparator comparator, ExtendedCell left, byte[] right, int roffset, int rlength, short rowlength) { /*** * KeyValue Format and commonLength: @@ -2954,4 
+2947,149 @@ public static ExtendedCell createLastOnRowCol(final Cell cell) { public static ExtendedCell createFirstDeleteFamilyCellOnRow(final byte[] row, final byte[] fam) { return new FirstOnRowDeleteFamilyCell(row, fam); } + + /** + * In fact, in HBase, all {@link Cell}s are {@link ExtendedCell}s. We do not expect users to + * implement their own {@link Cell} types, except some special projects like Phoenix, where they + * just use {@link org.apache.hadoop.hbase.KeyValue} and {@link ExtendedCell} directly. + * @return the original {@code cell} which has already been cast to an {@link ExtendedCell}. + * @throws IllegalArgumentException if the given {@code cell} is not an {@link ExtendedCell}. + */ + public static ExtendedCell ensureExtendedCell(Cell cell) { + Preconditions.checkArgument(cell == null || cell instanceof ExtendedCell, + "Unsupported cell type: %s", cell.getClass().getName()); + return (ExtendedCell) cell; + } + + public static boolean equals(ExtendedCell a, ExtendedCell b) { + return CellUtil.matchingRows(a, b) && CellUtil.matchingFamily(a, b) + && CellUtil.matchingQualifier(a, b) && CellUtil.matchingTimestamp(a, b) + && PrivateCellUtil.matchingType(a, b); + } + + /** Returns ExtendedCellScanner interface over cellIterables */ + public static ExtendedCellScanner + createExtendedCellScanner(final List cellScannerables) { + return new ExtendedCellScanner() { + private final Iterator iterator = + cellScannerables.iterator(); + private ExtendedCellScanner cellScanner = null; + + @Override + public ExtendedCell current() { + return this.cellScanner != null ? this.cellScanner.current() : null; + } + + @Override + public boolean advance() throws IOException { + while (true) { + if (this.cellScanner == null) { + if (!this.iterator.hasNext()) { + return false; + } + this.cellScanner = this.iterator.next().cellScanner(); + } + if (this.cellScanner.advance()) { + return true; + } + this.cellScanner = null; + } + } + }; + } + + /** + * Flatten the map of cells out under the ExtendedCellScanner + * @param map Map of Cell Lists; for example, the map of families to ExtendedCells that is used + * inside Put, etc., keeping Cells organized by family. + * @return ExtendedCellScanner interface over cellIterable + */ + public static ExtendedCellScanner + createExtendedCellScanner(final NavigableMap> map) { + return new ExtendedCellScanner() { + private final Iterator>> entries = map.entrySet().iterator(); + private Iterator currentIterator = null; + private ExtendedCell currentCell; + + @Override + public ExtendedCell current() { + return this.currentCell; + } + + @Override + public boolean advance() { + while (true) { + if (this.currentIterator == null) { + if (!this.entries.hasNext()) return false; + this.currentIterator = this.entries.next().getValue().iterator(); + } + if (this.currentIterator.hasNext()) { + this.currentCell = this.currentIterator.next(); + return true; + } + this.currentCell = null; + this.currentIterator = null; + } + } + }; + } + + /** Returns CellScanner interface over cellArray */ + public static ExtendedCellScanner createExtendedCellScanner(final ExtendedCell[] cellArray) { + return new ExtendedCellScanner() { + private final ExtendedCell[] cells = cellArray; + private int index = -1; + + @Override + public ExtendedCell current() { + if (cells == null) { + return null; + } + return (index < 0) ? 
null : this.cells[index]; + } + + @Override + public boolean advance() { + if (cells == null) { + return false; + } + return ++index < this.cells.length; + } + }; + } + + /** Returns ExtendedCellScanner interface over cellIterable */ + public static ExtendedCellScanner + createExtendedCellScanner(final Iterable cellIterable) { + if (cellIterable == null) { + return null; + } + return createExtendedCellScanner(cellIterable.iterator()); + } + + /** + * Returns ExtendedCellScanner interface over cellIterable or null if + * cells is null + */ + public static ExtendedCellScanner createExtendedCellScanner(final Iterator cells) { + if (cells == null) { + return null; + } + return new ExtendedCellScanner() { + private final Iterator iterator = cells; + private ExtendedCell current = null; + + @Override + public ExtendedCell current() { + return this.current; + } + + @Override + public boolean advance() { + boolean hasNext = this.iterator.hasNext(); + this.current = hasNext ? this.iterator.next() : null; + return hasNext; + } + }; + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java index 6d911bccd58a..cc15fa7ba752 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java @@ -66,12 +66,12 @@ public static Pair readVIntValuePart(Tag tag, int offset) thro } /** Returns A List<Tag> of any Tags found in cell else null. */ - public static List carryForwardTags(final Cell cell) { + public static List carryForwardTags(final ExtendedCell cell) { return carryForwardTags(null, cell); } /** Add to tagsOrNull any Tags cell is carrying or null if none. */ - public static List carryForwardTags(final List tagsOrNull, final Cell cell) { + public static List carryForwardTags(final List tagsOrNull, final ExtendedCell cell) { Iterator itr = PrivateCellUtil.tagsIterator(cell); if (itr == EMPTY_TAGS_ITR) { // If no Tags, return early. @@ -87,7 +87,7 @@ public static List carryForwardTags(final List tagsOrNull, final Cell return tags; } - public static byte[] concatTags(byte[] tags, Cell cell) { + public static byte[] concatTags(byte[] tags, ExtendedCell cell) { int cellTagsLen = cell.getTagsLength(); if (cellTagsLen == 0) { // If no Tags, return early. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java index 9a2a29356b14..8f4692ee778f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java @@ -22,21 +22,21 @@ import java.io.IOException; import java.io.InputStream; import java.io.PushbackInputStream; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * TODO javadoc + * Base implementation for {@link Codec.Decoder}. 
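The createExtendedCellScanner overloads added to PrivateCellUtil above mirror the public CellUtil.createCellScanner factories but stay in ExtendedCell space, so internal callers get typed cells back without casting. A short usage sketch of the array overload; the ScannerSketch class and its values are illustrative only:

    import java.io.IOException;
    import org.apache.hadoop.hbase.ExtendedCell;
    import org.apache.hadoop.hbase.ExtendedCellScanner;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.PrivateCellUtil;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class ScannerSketch {
      static void scan() throws IOException {
        ExtendedCell[] cells = { new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
          Bytes.toBytes("q"), Bytes.toBytes("v")) };
        ExtendedCellScanner scanner = PrivateCellUtil.createExtendedCellScanner(cells);
        while (scanner.advance()) {
          ExtendedCell cell = scanner.current(); // already an ExtendedCell, no cast
        }
      }
    }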
*/ @InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX }) public abstract class BaseDecoder implements Codec.Decoder { protected static final Logger LOG = LoggerFactory.getLogger(BaseDecoder.class); protected final InputStream in; - private Cell current = null; + private ExtendedCell current = null; protected static class PBIS extends PushbackInputStream { public PBIS(InputStream in, int size) { @@ -98,10 +98,10 @@ protected InputStream getInputStream() { * thrown if EOF is reached prematurely. Does not return null. */ @NonNull - protected abstract Cell parseCell() throws IOException; + protected abstract ExtendedCell parseCell() throws IOException; @Override - public Cell current() { + public ExtendedCell current() { return this.current; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java index 3ca5d2462de3..4875fcd227c9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java @@ -19,7 +19,7 @@ import java.io.IOException; import java.io.OutputStream; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; @@ -42,7 +42,7 @@ protected OutputStream getOuputStream() { } @Override - public abstract void write(Cell cell) throws IOException; + public abstract void write(ExtendedCell cell) throws IOException; protected void checkFlushed() throws CodecException { if (this.flushed) throw new CodecException("Flushed; done"); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java index f4552c038267..356513be7f75 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java @@ -21,8 +21,8 @@ import java.io.InputStream; import java.io.OutputStream; import org.apache.commons.io.IOUtils; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -43,7 +43,7 @@ static class CellEncoder extends BaseEncoder { } @Override - public void write(Cell cell) throws IOException { + public void write(ExtendedCell cell) throws IOException { checkFlushed(); // Row write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); @@ -80,7 +80,7 @@ public CellDecoder(final InputStream in) { } @Override - protected Cell parseCell() throws IOException { + protected ExtendedCell parseCell() throws IOException { byte[] row = readByteArray(this.in); byte[] family = readByteArray(in); byte[] qualifier = readByteArray(in); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java index 07bfb53d5df7..6a8e7e944439 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java @@ -21,8 +21,8 @@ import java.io.InputStream; import java.io.OutputStream; import 
org.apache.commons.io.IOUtils; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -44,7 +44,7 @@ static class CellEncoder extends BaseEncoder { } @Override - public void write(Cell cell) throws IOException { + public void write(ExtendedCell cell) throws IOException { checkFlushed(); // Row write(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); @@ -82,7 +82,7 @@ public CellDecoder(final InputStream in) { } @Override - protected Cell parseCell() throws IOException { + protected ExtendedCell parseCell() throws IOException { byte[] row = readByteArray(this.in); byte[] family = readByteArray(in); byte[] qualifier = readByteArray(in); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java index df8a94f5614b..18c8b8ea0ab8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/Codec.java @@ -19,7 +19,7 @@ import java.io.InputStream; import java.io.OutputStream; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.io.CellOutputStream; import org.apache.hadoop.hbase.nio.ByteBuff; @@ -46,7 +46,7 @@ interface Encoder extends CellOutputStream { * Implementations should implicitly clean up any resources allocated when the Decoder/CellScanner * runs off the end of the cell block. Do this rather than require the user call close explicitly. 
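Because Codec.Decoder now extends ExtendedCellScanner, a decode loop yields ExtendedCell directly. A rough round-trip sketch with KeyValueCodec, assuming in-memory streams purely for illustration:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.hbase.ExtendedCell;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.codec.Codec;
    import org.apache.hadoop.hbase.codec.KeyValueCodec;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class CodecRoundTripSketch {
      static void roundTrip() throws IOException {
        Codec codec = new KeyValueCodec();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Codec.Encoder encoder = codec.getEncoder(out);
        encoder.write(new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
          Bytes.toBytes("q"), Bytes.toBytes("v")));
        encoder.flush();
        Codec.Decoder decoder = codec.getDecoder(new ByteArrayInputStream(out.toByteArray()));
        while (decoder.advance()) {
          ExtendedCell cell = decoder.current(); // typed as ExtendedCell after this change
        }
      }
    }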
*/ - interface Decoder extends CellScanner { + interface Decoder extends ExtendedCellScanner { } Decoder getDecoder(InputStream is); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java index 9913eac3615c..ef40b395b7b1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java @@ -21,7 +21,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.NoTagsByteBufferKeyValue; @@ -55,7 +55,7 @@ public KeyValueEncoder(final OutputStream out) { } @Override - public void write(Cell cell) throws IOException { + public void write(ExtendedCell cell) throws IOException { checkFlushed(); // Do not write tags over RPC ByteBufferUtils.putInt(this.out, KeyValueUtil.getSerializedSize(cell, false)); @@ -69,7 +69,7 @@ public KeyValueDecoder(final InputStream in) { } @Override - protected Cell parseCell() throws IOException { + protected ExtendedCell parseCell() throws IOException { // No tags here return KeyValueUtil.createKeyValueFromInputStream(in, false); } @@ -78,7 +78,7 @@ protected Cell parseCell() throws IOException { public static class ByteBuffKeyValueDecoder implements Codec.Decoder { protected final ByteBuff buf; - protected Cell current = null; + protected ExtendedCell current = null; public ByteBuffKeyValueDecoder(ByteBuff buf) { this.buf = buf; @@ -101,15 +101,15 @@ public boolean advance() throws IOException { } @Override - public Cell current() { + public ExtendedCell current() { return this.current; } - protected Cell createCell(byte[] buf, int offset, int len) { + protected ExtendedCell createCell(byte[] buf, int offset, int len) { return new NoTagsKeyValue(buf, offset, len); } - protected Cell createCell(ByteBuffer bb, int pos, int len) { + protected ExtendedCell createCell(ByteBuffer bb, int pos, int len) { // We know there is not going to be any tags. 
return new NoTagsByteBufferKeyValue(bb, pos, len); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java index 8c2c20625dfb..655bc4c5f261 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java @@ -22,7 +22,7 @@ import java.io.OutputStream; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferKeyValue; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -58,7 +58,7 @@ public KeyValueEncoder(final OutputStream out) { } @Override - public void write(Cell cell) throws IOException { + public void write(ExtendedCell cell) throws IOException { checkFlushed(); // Write tags ByteBufferUtils.putInt(this.out, KeyValueUtil.getSerializedSize(cell, true)); @@ -72,7 +72,7 @@ public KeyValueDecoder(final InputStream in) { } @Override - protected Cell parseCell() throws IOException { + protected ExtendedCell parseCell() throws IOException { // create KeyValue with tags return KeyValueUtil.createKeyValueFromInputStream(in, true); } @@ -85,12 +85,12 @@ public ByteBuffKeyValueDecoder(ByteBuff buf) { } @Override - protected Cell createCell(byte[] buf, int offset, int len) { + protected ExtendedCell createCell(byte[] buf, int offset, int len) { return new KeyValue(buf, offset, len); } @Override - protected Cell createCell(ByteBuffer bb, int pos, int len) { + protected ExtendedCell createCell(ByteBuffer bb, int pos, int len) { return new ByteBufferKeyValue(bb, pos, len); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/CellOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/CellOutputStream.java index d1310137e8ce..8236dceca913 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/CellOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/CellOutputStream.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.io; import java.io.IOException; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; @@ -40,7 +40,7 @@ public interface CellOutputStream { * the copy of the Cell that was added in the write. * @param cell Cell to write out */ - void write(Cell cell) throws IOException; + void write(ExtendedCell cell) throws IOException; /** * Let the implementation decide what to do. 
Usually means writing accumulated data into a byte[] diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/SizedCellScanner.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/SizedExtendedCellScanner.java similarity index 85% rename from hbase-common/src/main/java/org/apache/hadoop/hbase/io/SizedCellScanner.java rename to hbase-common/src/main/java/org/apache/hadoop/hbase/io/SizedExtendedCellScanner.java index 379dfca051a3..8f31ba8fc832 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/SizedCellScanner.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/SizedExtendedCellScanner.java @@ -17,15 +17,13 @@ */ package org.apache.hadoop.hbase.io; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.yetus.audience.InterfaceAudience; -import org.apache.yetus.audience.InterfaceStability; /** * A CellScanner that knows its size in memory in bytes. Used playing the CellScanner into an * in-memory buffer; knowing the size ahead of time saves on background buffer resizings. */ @InterfaceAudience.Private -@InterfaceStability.Unstable -public interface SizedCellScanner extends CellScanner, HeapSize { +public interface SizedExtendedCellScanner extends ExtendedCellScanner, HeapSize { } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java index 68b300ae60fe..31724723bcd4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java @@ -29,7 +29,7 @@ import java.util.List; import org.apache.commons.lang3.NotImplementedException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; @@ -93,7 +93,7 @@ public EncodedDataBlock(Configuration conf, DataBlockEncoder dataBlockEncoder, * @param headerSize header size of the block. * @return Forwards sequential iterator. 
*/ - public Iterator getIterator(int headerSize) { + public Iterator getIterator(int headerSize) { final int rawSize = rawKVs.length; byte[] encodedDataWithHeader = getEncodedData(); int bytesToSkip = headerSize + Bytes.SIZEOF_SHORT; @@ -101,7 +101,7 @@ public Iterator getIterator(int headerSize) { encodedDataWithHeader.length - bytesToSkip); final DataInputStream dis = new DataInputStream(bais); - return new Iterator() { + return new Iterator() { private ByteBuffer decompressedData = null; private Iterator it = isTagsLenZero.iterator(); @@ -114,7 +114,7 @@ public boolean hasNext() { } @Override - public Cell next() { + public ExtendedCell next() { if (decompressedData == null) { try { decompressedData = dataBlockEncoder.decodeKeyValues(dis, diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java index 2c48e5b7d7b8..7fb4fd9685e9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java @@ -19,7 +19,7 @@ import java.io.DataOutputStream; import java.io.IOException; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -37,7 +37,7 @@ public NoneEncoder(DataOutputStream out, HFileBlockDefaultEncodingContext encodi this.encodingCtx = encodingCtx; } - public int write(Cell cell) throws IOException { + public int write(ExtendedCell cell) throws IOException { // We write tags seperately because though there is no tag in KV // if the hfilecontext says include tags we need the tags length to be // written diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java index c4b24728b427..553b39311369 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java @@ -76,7 +76,7 @@ public void testCompareCells() { kv1 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put); kv2 = new KeyValue(row1, fam1, qual1, 1L, KeyValue.Type.Put); - assertTrue(CellUtil.equals(kv1, kv2)); + assertTrue(PrivateCellUtil.equals(kv1, kv2)); } @Test diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java index dcd796fa3d4f..3ff63ef6ff65 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java @@ -22,7 +22,6 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -92,7 +91,7 @@ public boolean advance() { /** * Cell used in test. Has row only. 
*/ - private static class TestCell implements Cell { + private static class TestCell implements ExtendedCell { private final byte[] row; TestCell(final int i) { @@ -116,67 +115,56 @@ public short getRowLength() { @Override public byte[] getFamilyArray() { - // TODO Auto-generated method stub return null; } @Override public int getFamilyOffset() { - // TODO Auto-generated method stub return 0; } @Override public byte getFamilyLength() { - // TODO Auto-generated method stub return 0; } @Override public byte[] getQualifierArray() { - // TODO Auto-generated method stub return null; } @Override public int getQualifierOffset() { - // TODO Auto-generated method stub return 0; } @Override public int getQualifierLength() { - // TODO Auto-generated method stub return 0; } @Override public long getTimestamp() { - // TODO Auto-generated method stub return 0; } @Override public byte getTypeByte() { - // TODO Auto-generated method stub return 0; } @Override public byte[] getValueArray() { - // TODO Auto-generated method stub return null; } @Override public int getValueOffset() { - // TODO Auto-generated method stub return 0; } @Override public int getValueLength() { - // TODO Auto-generated method stub return 0; } @@ -187,25 +175,21 @@ public int getSerializedSize() { @Override public byte[] getTagsArray() { - // TODO Auto-generated method stub return null; } @Override public int getTagsOffset() { - // TODO Auto-generated method stub return 0; } @Override public long getSequenceId() { - // TODO Auto-generated method stub return 0; } @Override public int getTagsLength() { - // TODO Auto-generated method stub return 0; } @@ -213,6 +197,20 @@ public int getTagsLength() { public long heapSize() { return 0; } + + @Override + public void setSequenceId(long seqId) throws IOException { + + } + + @Override + public void setTimestamp(long ts) throws IOException { + + } + + @Override + public void setTimestamp(byte[] ts) throws IOException { + } } /** @@ -444,7 +442,7 @@ public void testCloneCellFieldsFromByteBufferedCell() { KeyValue kv = new KeyValue(r, f, q, 0, q.length, 1234L, KeyValue.Type.Put, v, 0, v.length, tags); ByteBuffer buffer = ByteBuffer.wrap(kv.getBuffer()); - Cell bbCell = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); + ExtendedCell bbCell = new ByteBufferKeyValue(buffer, 0, buffer.remaining()); byte[] rDest = CellUtil.cloneRow(bbCell); assertTrue(Bytes.equals(r, rDest)); byte[] fDest = CellUtil.cloneFamily(bbCell); @@ -519,30 +517,9 @@ public void testCellFieldsAsPrimitiveTypesFromByteBufferedCell() { assertEquals(bd, PrivateCellUtil.getValueAsBigDecimal(bbCell)); } - @Test - public void testWriteCell() throws IOException { - byte[] r = Bytes.toBytes("row1"); - byte[] f = Bytes.toBytes("cf1"); - byte[] q1 = Bytes.toBytes("qual1"); - byte[] v = Bytes.toBytes("val1"); - byte[] tags = Bytes.toBytes("tag1"); - KeyValue kv = - new KeyValue(r, f, q1, 0, q1.length, 1234L, KeyValue.Type.Put, v, 0, v.length, tags); - NonExtendedCell nonExtCell = new NonExtendedCell(kv); - ByteArrayOutputStream os = new ByteArrayOutputStream(); - PrivateCellUtil.writeCell(nonExtCell, os, true); - byte[] byteArray = os.toByteArray(); - KeyValue res = new KeyValue(byteArray); - assertTrue(CellUtil.equals(kv, res)); - } - - // Workaround for jdk 11 - reflective access to interface default methods for testGetType - private static abstract class CellForMockito implements Cell { - } - @Test public void testGetType() { - CellForMockito c = Mockito.mock(CellForMockito.class); + ExtendedCell c = 
Mockito.mock(ExtendedCell.class); Mockito.when(c.getType()).thenCallRealMethod(); for (Cell.Type type : Cell.Type.values()) { Mockito.when(c.getTypeByte()).thenReturn(type.getCode()); @@ -563,112 +540,4 @@ public void testGetType() { } catch (UnsupportedOperationException e) { } } - - private static class NonExtendedCell implements Cell { - private KeyValue kv; - - public NonExtendedCell(KeyValue kv) { - this.kv = kv; - } - - @Override - public byte[] getRowArray() { - return this.kv.getRowArray(); - } - - @Override - public int getRowOffset() { - return this.kv.getRowOffset(); - } - - @Override - public short getRowLength() { - return this.kv.getRowLength(); - } - - @Override - public byte[] getFamilyArray() { - return this.kv.getFamilyArray(); - } - - @Override - public int getFamilyOffset() { - return this.kv.getFamilyOffset(); - } - - @Override - public byte getFamilyLength() { - return this.kv.getFamilyLength(); - } - - @Override - public byte[] getQualifierArray() { - return this.kv.getQualifierArray(); - } - - @Override - public int getQualifierOffset() { - return this.kv.getQualifierOffset(); - } - - @Override - public int getQualifierLength() { - return this.kv.getQualifierLength(); - } - - @Override - public long getTimestamp() { - return this.kv.getTimestamp(); - } - - @Override - public byte getTypeByte() { - return this.kv.getTypeByte(); - } - - @Override - public long getSequenceId() { - return this.kv.getSequenceId(); - } - - @Override - public byte[] getValueArray() { - return this.kv.getValueArray(); - } - - @Override - public int getValueOffset() { - return this.kv.getValueOffset(); - } - - @Override - public int getValueLength() { - return this.kv.getValueLength(); - } - - @Override - public int getSerializedSize() { - return this.kv.getSerializedSize(); - } - - @Override - public byte[] getTagsArray() { - return this.kv.getTagsArray(); - } - - @Override - public int getTagsOffset() { - return this.kv.getTagsOffset(); - } - - @Override - public int getTagsLength() { - return this.kv.getTagsLength(); - } - - @Override - public long heapSize() { - return this.kv.heapSize(); - } - } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java index 9963e37c17f6..3508419cea78 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java @@ -26,10 +26,10 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; @@ -113,14 +113,14 @@ public void testThree() throws IOException { DataInputStream dis = new DataInputStream(cis); Codec.Decoder decoder = codec.getDecoder(dis); assertTrue(decoder.advance()); - Cell c = decoder.current(); - assertTrue(CellUtil.equals(c, kv1)); + ExtendedCell c = decoder.current(); + assertTrue(PrivateCellUtil.equals(c, kv1)); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellUtil.equals(c, kv2)); + assertTrue(PrivateCellUtil.equals(c, kv2)); 
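The assertions above switch from CellUtil.equals to the new PrivateCellUtil.equals(ExtendedCell, ExtendedCell), which per this patch compares row, family, qualifier, timestamp and the raw type byte. A tiny illustrative sketch of that last point; class name and values are made up:

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.PrivateCellUtil;
    import org.apache.hadoop.hbase.util.Bytes;

    public final class EqualsSketch {
      public static void main(String[] args) {
        byte[] row = Bytes.toBytes("r");
        byte[] fam = Bytes.toBytes("f");
        byte[] qual = Bytes.toBytes("q");
        KeyValue put = new KeyValue(row, fam, qual, 1L, KeyValue.Type.Put);
        KeyValue del = new KeyValue(row, fam, qual, 1L, KeyValue.Type.Delete);
        // Same coordinates, different type byte: not equal.
        System.out.println(PrivateCellUtil.equals(put, put)); // true
        System.out.println(PrivateCellUtil.equals(put, del)); // false
      }
    }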
assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellUtil.equals(c, kv3)); + assertTrue(PrivateCellUtil.equals(c, kv3)); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java index b44473ee49e6..17c95eb2c729 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java @@ -28,8 +28,8 @@ import java.io.IOException; import java.util.List; import org.apache.hadoop.hbase.ArrayBackedTag; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -58,15 +58,15 @@ public void testCellWithTag() throws IOException { DataOutputStream dos = new DataOutputStream(cos); Codec codec = new CellCodecWithTags(); Codec.Encoder encoder = codec.getEncoder(dos); - final Cell cell1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), - HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), + final ExtendedCell cell1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), + Bytes.toBytes("1"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")), new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) }); - final Cell cell2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), - HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), + final ExtendedCell cell2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), + Bytes.toBytes("2"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring3")), }); - final Cell cell3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), - HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), + final ExtendedCell cell3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), + Bytes.toBytes("3"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] { new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")), new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")), new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) }); @@ -81,7 +81,7 @@ public void testCellWithTag() throws IOException { DataInputStream dis = new DataInputStream(cis); Codec.Decoder decoder = codec.getDecoder(dis); assertTrue(decoder.advance()); - Cell c = decoder.current(); + ExtendedCell c = decoder.current(); assertTrue(CellUtil.equals(c, cell1)); List tags = PrivateCellUtil.getTags(c); assertEquals(2, tags.size()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java index 66a874e5c5ec..e2bdecbf7c6b 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java @@ -28,8 +28,8 @@ import java.io.IOException; import java.util.List; import org.apache.hadoop.hbase.ArrayBackedTag; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import 
org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -81,7 +81,7 @@ public void testKeyValueWithTag() throws IOException { DataInputStream dis = new DataInputStream(cis); Codec.Decoder decoder = codec.getDecoder(dis); assertTrue(decoder.advance()); - Cell c = decoder.current(); + ExtendedCell c = decoder.current(); assertTrue(CellUtil.equals(c, kv1)); List tags = PrivateCellUtil.getTags(c); assertEquals(2, tags.size()); diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java index 9d122fb0b81c..cb6e14d1b278 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java @@ -26,8 +26,8 @@ import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellScannable; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScannable; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -72,13 +72,14 @@ public HBaseRpcController newController() { } @Override - public HBaseRpcController newController(RegionInfo regionInfo, CellScanner cellScanner) { + public HBaseRpcController newController(RegionInfo regionInfo, + ExtendedCellScanner cellScanner) { return new CountingRpcController(super.newController(regionInfo, cellScanner)); } @Override public HBaseRpcController newController(RegionInfo regionInfo, - List cellIterables) { + List cellIterables) { return new CountingRpcController(super.newController(regionInfo, cellIterables)); } } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSerialization.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSerialization.java index 9d567f95a0e0..456f99629e34 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSerialization.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSerialization.java @@ -23,6 +23,7 @@ import java.io.InputStream; import java.io.OutputStream; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -85,7 +86,12 @@ public void open(OutputStream os) throws IOException { @Override public void serialize(Cell kv) throws IOException { dos.writeInt(PrivateCellUtil.estimatedSerializedSizeOf(kv) - Bytes.SIZEOF_INT); - PrivateCellUtil.writeCell(kv, dos, true); + if (kv instanceof ExtendedCell) { + ((ExtendedCell) kv).write(dos, true); + } else { + throw new UnsupportedOperationException( + "Unsupported cell type: " + kv.getClass().getName()); + } } } } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSortReducer.java index 9380b0e71336..dd6ff00497f4 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSortReducer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/CellSortReducer.java @@ -17,10 +17,10 
@@ */ package org.apache.hadoop.hbase.mapreduce; -import java.io.IOException; import java.util.TreeSet; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.util.MapReduceExtendedCell; @@ -38,19 +38,17 @@ public class CellSortReducer protected void reduce(ImmutableBytesWritable row, Iterable kvs, Reducer.Context context) throws java.io.IOException, InterruptedException { - TreeSet map = new TreeSet<>(CellComparator.getInstance()); + TreeSet set = new TreeSet<>(CellComparator.getInstance()); for (Cell kv : kvs) { - try { - map.add(PrivateCellUtil.deepClone(kv)); - } catch (CloneNotSupportedException e) { - throw new IOException(e); - } + set.add(PrivateCellUtil.ensureExtendedCell(kv)); } - context.setStatus("Read " + map.getClass()); + context.setStatus("Read " + set.getClass()); int index = 0; - for (Cell kv : map) { + for (ExtendedCell kv : set) { context.write(row, new MapReduceExtendedCell(kv)); - if (++index % 100 == 0) context.setStatus("Wrote " + index); + if (++index % 100 == 0) { + context.setStatus("Wrote " + index); + } } } } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExtendedCellSerialization.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExtendedCellSerialization.java index c784b2561881..9e89c7896261 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExtendedCellSerialization.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExtendedCellSerialization.java @@ -94,7 +94,7 @@ public void open(OutputStream os) throws IOException { @Override public void serialize(ExtendedCell kv) throws IOException { dos.writeInt(PrivateCellUtil.estimatedSerializedSizeOf(kv) - Bytes.SIZEOF_INT); - PrivateCellUtil.writeCell(kv, dos, true); + kv.write(dos, true); dos.writeLong(kv.getSequenceId()); } } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java index 225a2dc6fe2b..51e23abc8ca6 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java @@ -240,13 +240,13 @@ static RecordWriter createRecordWrit @Override public void write(ImmutableBytesWritable row, V cell) throws IOException { - Cell kv = cell; // null input == user explicitly wants to flush - if (row == null && kv == null) { + if (row == null && cell == null) { rollWriters(null); return; } + ExtendedCell kv = PrivateCellUtil.ensureExtendedCell(cell); byte[] rowKey = CellUtil.cloneRow(kv); int length = (PrivateCellUtil.estimatedSerializedSizeOf(kv)) - Bytes.SIZEOF_INT; byte[] family = CellUtil.cloneFamily(kv); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java index 6605c6783ba8..8a8b846959b6 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; +import 
org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -51,6 +52,7 @@ import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Mutation; +import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.Result; @@ -114,7 +116,7 @@ public int getPartition(CellWritableComparable key, Cell value, int numPartition public static class CellWritableComparable implements WritableComparable { - private Cell kv = null; + private ExtendedCell kv = null; static { // register this comparator @@ -125,7 +127,7 @@ public CellWritableComparable() { } public CellWritableComparable(Cell kv) { - this.kv = kv; + this.kv = (ExtendedCell) kv; } @Override @@ -174,7 +176,7 @@ protected void reduce(CellWritableComparable row, Iterable kvs, int index = 0; for (Cell kv : kvs) { context.write(new ImmutableBytesWritable(CellUtil.cloneRow(kv)), - new MapReduceExtendedCell(kv)); + new MapReduceExtendedCell(PrivateCellUtil.ensureExtendedCell(kv))); if (++index % 100 == 0) context.setStatus("Wrote " + index + " KeyValues, " + "and the rowkey whose is being wrote is " + Bytes.toString(kv.getRowArray())); } @@ -203,10 +205,12 @@ public void map(ImmutableBytesWritable row, Result value, Context context) throw filter == null || !filter.filterRowKey( PrivateCellUtil.createFirstOnRow(row.get(), row.getOffset(), (short) row.getLength())) ) { - for (Cell kv : value.rawCells()) { + for (ExtendedCell kv : PackagePrivateFieldAccessor.getExtendedRawCells(value)) { kv = filterKv(filter, kv); // skip if we filtered it out - if (kv == null) continue; + if (kv == null) { + continue; + } Cell ret = convertKv(kv, cfRenameMap); context.write(new CellWritableComparable(ret), ret); } @@ -267,10 +271,12 @@ public void map(ImmutableBytesWritable row, Result value, Context context) throw filter == null || !filter.filterRowKey( PrivateCellUtil.createFirstOnRow(row.get(), row.getOffset(), (short) row.getLength())) ) { - for (Cell kv : value.rawCells()) { + for (ExtendedCell kv : PackagePrivateFieldAccessor.getExtendedRawCells(value)) { kv = filterKv(filter, kv); // skip if we filtered it out - if (kv == null) continue; + if (kv == null) { + continue; + } context.write(row, new MapReduceExtendedCell(convertKv(kv, cfRenameMap))); } } @@ -330,10 +336,12 @@ private void writeResult(ImmutableBytesWritable key, Result result, Context cont protected void processKV(ImmutableBytesWritable key, Result result, Context context, Put put, Delete delete) throws IOException, InterruptedException { - for (Cell kv : result.rawCells()) { + for (ExtendedCell kv : PackagePrivateFieldAccessor.getExtendedRawCells(result)) { kv = filterKv(filter, kv); // skip if we filter it out - if (kv == null) continue; + if (kv == null) { + continue; + } kv = convertKv(kv, cfRenameMap); // Deletes and Puts are gathered and written when finished @@ -476,7 +484,7 @@ private static ArrayList toQuotedByteArrays(String... 
stringArgs) { * @return null if the key should not be written, otherwise returns the original * {@link Cell} */ - public static Cell filterKv(Filter filter, Cell c) throws IOException { + public static ExtendedCell filterKv(Filter filter, ExtendedCell c) throws IOException { // apply the filter and skip this kv if the filter doesn't apply if (filter != null) { Filter.ReturnCode code = filter.filterCell(c); @@ -495,7 +503,7 @@ public static Cell filterKv(Filter filter, Cell c) throws IOException { } // helper: create a new KeyValue based on CF rename map - private static Cell convertKv(Cell kv, Map cfRenameMap) { + private static ExtendedCell convertKv(ExtendedCell kv, Map cfRenameMap) { if (cfRenameMap != null) { // If there's a rename mapping for this CF, create a new KeyValue byte[] newCfName = cfRenameMap.get(CellUtil.cloneFamily(kv)); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java index 90dc5c1d555f..cd25736bd6ee 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java @@ -21,10 +21,8 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.ExtendedCell; -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.mapreduce.Reducer; import org.apache.yetus.audience.InterfaceAudience; @@ -51,45 +49,51 @@ protected void reduce(K row, Iterable vals, Context context) context.getConfiguration().getLong("putcombiner.row.threshold", 1L * (1 << 30)); int cnt = 0; long curSize = 0; - Put put = null; - Map> familyMap = null; + Put combinedPut = null; + Map> combinedFamilyMap = null; for (Put p : vals) { cnt++; - if (put == null) { - put = p; - familyMap = put.getFamilyCellMap(); + if (combinedPut == null) { + combinedPut = p; + combinedFamilyMap = PackagePrivateFieldAccessor.getExtendedFamilyCellMap(combinedPut); } else { - for (Entry> entry : p.getFamilyCellMap().entrySet()) { - List cells = familyMap.get(entry.getKey()); - List kvs = (cells != null) ? 
(List) cells : null; - for (Cell cell : entry.getValue()) { - KeyValue kv = KeyValueUtil.ensureKeyValue((ExtendedCell) cell); - curSize += kv.heapSize(); - if (kvs != null) { - kvs.add(kv); + for (Entry> entry : PackagePrivateFieldAccessor + .getExtendedFamilyCellMap(p).entrySet()) { + List existCells = combinedFamilyMap.get(entry.getKey()); + if (existCells == null) { + // no cells for this family yet, just put it + combinedFamilyMap.put(entry.getKey(), entry.getValue()); + // do not forget to calculate the size + for (ExtendedCell cell : entry.getValue()) { + curSize += cell.heapSize(); + } + } else { + // otherwise just add the cells to the existing list for this family + for (ExtendedCell cell : entry.getValue()) { + existCells.add(cell); + curSize += cell.heapSize(); } } - if (cells == null) { - familyMap.put(entry.getKey(), entry.getValue()); - } } - if (cnt % 10 == 0) context.setStatus("Combine " + cnt); + if (cnt % 10 == 0) { + context.setStatus("Combine " + cnt); + } if (curSize > threshold) { if (LOG.isDebugEnabled()) { LOG.debug(String.format("Combined %d Put(s) into %d.", cnt, 1)); } - context.write(row, put); - put = null; + context.write(row, combinedPut); + combinedPut = null; curSize = 0; cnt = 0; } } } - if (put != null) { if (LOG.isDebugEnabled()) { LOG.debug(String.format("Combined %d Put(s) into %d.", cnt, 1)); } - context.write(row, put); + if (combinedPut != null) { if (LOG.isDebugEnabled()) { LOG.debug(String.format("Combined %d Put(s) into %d.", cnt, 1)); } + context.write(row, combinedPut); } } } diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java index b4061d6be6a9..c8f32c205fb7 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java @@ -24,7 +24,6 @@ import java.util.TreeSet; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.KeyValue; @@ -32,6 +31,7 @@ import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.TagUtil; +import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -62,9 +62,9 @@ public class PutSortReducer } @Override - protected void reduce(ImmutableBytesWritable row, java.lang.Iterable puts, + protected void reduce(ImmutableBytesWritable row, Iterable puts, Reducer.Context context) - throws java.io.IOException, InterruptedException { + throws IOException, InterruptedException { // although reduce() is called per-row, handle pathological case long threshold = context.getConfiguration().getLong("putsortreducer.row.threshold", 1L * (1 << 30)); @@ -100,8 +100,9 @@ protected void reduce(ImmutableBytesWritable row, java.lang.Iterable puts, // just ignoring the bad one? throw new IOException("Invalid visibility expression found in mutation " + p, e); } - for (List cells : p.getFamilyCellMap().values()) { - for (ExtendedCell cell : (List) (List) cells) { + for (List cells : PackagePrivateFieldAccessor.getExtendedFamilyCellMap(p) + .values()) { + for (ExtendedCell cell : cells) { // Creating the KV which needs to be directly written to HFiles.
Using the Facade // KVCreator for creation of kvs. KeyValue kv = null; diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java index 37e99c096e5c..888e285f340e 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java @@ -117,7 +117,8 @@ public void map(WALKey key, WALEdit value, Context context) throws IOException { byte[] outKey = multiTableSupport ? Bytes.add(table.getName(), Bytes.toBytes(tableSeparator), CellUtil.cloneRow(cell)) : CellUtil.cloneRow(cell); - context.write(new ImmutableBytesWritable(outKey), new MapReduceExtendedCell(cell)); + context.write(new ImmutableBytesWritable(outKey), + new MapReduceExtendedCell(PrivateCellUtil.ensureExtendedCell(cell))); } } } catch (InterruptedException e) { diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceExtendedCell.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceExtendedCell.java index ca0d5f63a078..4233e96f2c79 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceExtendedCell.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceExtendedCell.java @@ -21,7 +21,6 @@ import java.io.OutputStream; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.ByteBufferExtendedCell; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -33,9 +32,9 @@ @InterfaceAudience.Private public class MapReduceExtendedCell extends ByteBufferExtendedCell { - private final Cell cell; + private final ExtendedCell cell; - public MapReduceExtendedCell(Cell cell) { + public MapReduceExtendedCell(ExtendedCell cell) { this.cell = cell; } @@ -226,17 +225,17 @@ public String toString() { @Override public void setSequenceId(long seqId) throws IOException { - PrivateCellUtil.setSequenceId(cell, seqId); + cell.setSequenceId(seqId); } @Override public void setTimestamp(long ts) throws IOException { - PrivateCellUtil.setTimestamp(cell, ts); + cell.setTimestamp(ts); } @Override public void setTimestamp(byte[] ts) throws IOException { - PrivateCellUtil.setTimestamp(cell, ts); + cell.setTimestamp(ts); } @Override @@ -246,7 +245,7 @@ public long heapSize() { @Override public int write(OutputStream out, boolean withTags) throws IOException { - return PrivateCellUtil.writeCell(cell, out, withTags); + return cell.write(out, withTags); } @Override @@ -256,15 +255,11 @@ public int getSerializedSize(boolean withTags) { @Override public void write(ByteBuffer buf, int offset) { - PrivateCellUtil.writeCellToBuffer(cell, buf, offset); + cell.write(buf, offset); } @Override public ExtendedCell deepClone() { - try { - return (ExtendedCell) PrivateCellUtil.deepClone(cell); - } catch (CloneNotSupportedException e) { - throw new RuntimeException(e); - } + return cell.deepClone(); } } diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java index e67ee3dbb736..0714f27e64d9 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java @@ -56,6 +56,7 @@ 
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; @@ -518,7 +519,7 @@ public void test_WritingTagData() throws Exception { HFile.createReader(fs, keyFileStatus.getPath(), new CacheConfig(conf), true, conf); HFileScanner scanner = reader.getScanner(conf, false, false, false); scanner.seekTo(); - Cell cell = scanner.getCell(); + ExtendedCell cell = scanner.getCell(); List tagsFromCell = PrivateCellUtil.getTags(cell); assertTrue(tagsFromCell.size() > 0); for (Tag tag : tagsFromCell) { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index 5e3e52de6ad4..25bec82da09e 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -42,9 +42,9 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Mutation; +import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.Result; @@ -365,7 +366,7 @@ public void testWithDeletes() throws Throwable { s.setRaw(true); ResultScanner scanner = t.getScanner(s); Result r = scanner.next(); - Cell[] res = r.rawCells(); + ExtendedCell[] res = PackagePrivateFieldAccessor.getExtendedRawCells(r); assertTrue(PrivateCellUtil.isDeleteFamily(res[0])); assertEquals(now + 4, res[1].getTimestamp()); assertEquals(now + 3, res[2].getTimestamp()); @@ -820,7 +821,7 @@ public void testTagsAddition() throws Throwable { } private void checkWhetherTagExists(TableName table, boolean tagExists) throws IOException { - List values = new ArrayList<>(); + List values = new ArrayList<>(); for (HRegion region : UTIL.getHBaseCluster().getRegions(table)) { Scan scan = new Scan(); // Make sure to set rawScan to true so that we will get Delete Markers. @@ -830,13 +831,13 @@ private void checkWhetherTagExists(TableName table, boolean tagExists) throws IO // Need to use RegionScanner instead of table#getScanner since the latter will // not return tags since it will go through rpc layer and remove tags intentionally. 
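// Note on the scanner calls that follow: RegionScanner is declared against the
// public Cell type, while this test collects ExtendedCells so that tags stay
// reachable, hence the raw-type cast on next(). A commented sketch of the tag
// check the collected cells feed into (mirrors the loop below):
//   for (ExtendedCell cell : values) {
//     if (PrivateCellUtil.isDelete(cell.getType().getCode())) {
//       List<Tag> tags = PrivateCellUtil.getTags(cell); // tags are visible below the rpc layer
//     }
//   }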
RegionScanner scanner = region.getScanner(scan); - scanner.next(values); + scanner.next((List) values); if (!values.isEmpty()) { break; } } boolean deleteFound = false; - for (Cell cell : values) { + for (ExtendedCell cell : values) { if (PrivateCellUtil.isDelete(cell.getType().getCode())) { deleteFound = true; List tags = PrivateCellUtil.getTags(cell); @@ -881,11 +882,11 @@ public void preBatchMutate(ObserverContext c, } Tag sourceOpTag = new ArrayBackedTag(TEST_TAG_TYPE, sourceOpAttr); List updatedCells = new ArrayList<>(); - for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) { - Cell cell = cellScanner.current(); + for (ExtendedCellScanner cellScanner = m.cellScanner(); cellScanner.advance();) { + ExtendedCell cell = cellScanner.current(); List tags = PrivateCellUtil.getTags(cell); tags.add(sourceOpTag); - Cell updatedCell = PrivateCellUtil.createCell((ExtendedCell) cell, tags); + Cell updatedCell = PrivateCellUtil.createCell(cell, tags); updatedCells.add(updatedCell); } m.getFamilyCellMap().clear(); @@ -934,9 +935,10 @@ public void testTagsWithEmptyCodec() throws Exception { int count = 0; Result result; while ((result = scanner.next()) != null) { - List cells = result.listCells(); + List cells = + Arrays.asList(PackagePrivateFieldAccessor.getExtendedRawCells(result)); assertEquals(2, cells.size()); - Cell cell = cells.get(0); + ExtendedCell cell = cells.get(0); assertTrue(CellUtil.isDelete(cell)); List tags = PrivateCellUtil.getTags(cell); assertEquals(0, tags.size()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionReplicationRetryingCaller.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionReplicationRetryingCaller.java index e2b45fe30c3c..a0ca5b990dd1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionReplicationRetryingCaller.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionReplicationRetryingCaller.java @@ -22,8 +22,8 @@ import java.io.IOException; import java.util.Collections; import java.util.List; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.protobuf.ReplicationProtobufUtil; import org.apache.hadoop.hbase.util.Pair; @@ -94,7 +94,7 @@ private void call(HRegionLocation loc) { err -> conn.getLocator().updateCachedLocationOnError(loc, err)); return; } - Pair pair = ReplicationProtobufUtil + Pair pair = ReplicationProtobufUtil .buildReplicateWALEntryRequest(entries, replica.getEncodedNameAsBytes(), null, null, null); resetCallTimeout(); controller.setCellScanner(pair.getSecond()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java index f5fcc02e9186..81707fe1f16b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/AsyncRegionServerAdmin.java @@ -19,7 +19,7 @@ import java.io.IOException; import java.util.concurrent.CompletableFuture; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.yetus.audience.InterfaceAudience; @@ -95,7 +95,8 @@ private interface 
RpcCall { void call(AdminService.Interface stub, HBaseRpcController controller, RpcCallback done); } - private CompletableFuture call(RpcCall rpcCall, CellScanner cellScanner) { + private CompletableFuture call(RpcCall rpcCall, + ExtendedCellScanner cellScanner) { CompletableFuture future = new CompletableFuture<>(); HBaseRpcController controller = conn.rpcControllerFactory.newController(null, cellScanner); try { @@ -158,8 +159,8 @@ public CompletableFuture compactRegion(CompactRegionReque return call((stub, controller, done) -> stub.compactRegion(controller, request, done)); } - public CompletableFuture - replicateWALEntry(ReplicateWALEntryRequest request, CellScanner cellScanner, int timeout) { + public CompletableFuture replicateWALEntry( + ReplicateWALEntryRequest request, ExtendedCellScanner cellScanner, int timeout) { return call((stub, controller, done) -> { controller.setCallTimeout(timeout); stub.replicateWALEntry(controller, request, done); @@ -167,7 +168,7 @@ public CompletableFuture compactRegion(CompactRegionReque } public CompletableFuture replay(ReplicateWALEntryRequest request, - CellScanner cellScanner) { + ExtendedCellScanner cellScanner) { return call((stub, controller, done) -> stub.replay(controller, request, done), cellScanner); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java index a6d6940e1e4c..8482a819c0d5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java @@ -20,8 +20,8 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; @@ -46,7 +46,7 @@ static class MessageEncoder extends BaseEncoder { } @Override - public void write(Cell cell) throws IOException { + public void write(ExtendedCell cell) throws IOException { checkFlushed(); CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder(); // This copies bytes from Cell to ByteString. I don't see anyway around the copy. 
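// Encoder counterpart of the decoder change: write() now accepts ExtendedCell, so
// the protobuf copy noted above always starts from an internal cell (KeyValue
// qualifies). A commented usage sketch (stream and cell names are illustrative):
//   Codec codec = new MessageCodec();
//   Codec.Encoder encoder = codec.getEncoder(new DataOutputStream(out));
//   encoder.write(kv); // kv is an ExtendedCell, e.g. a KeyValue
//   encoder.flush();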
@@ -75,7 +75,7 @@ static class MessageDecoder extends BaseDecoder { } @Override - protected Cell parseCell() throws IOException { + protected ExtendedCell parseCell() throws IOException { return ProtobufUtil.toCell(cellBuilder, CellProtos.Cell.parseDelimitedFrom(this.in), false); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java index 0c32303746c0..9900aa63cab6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; @@ -369,7 +370,7 @@ private void scanKeysValues(Path file, KeyValueStatsCollector fileStats, HFileSc Set foundMobFiles = new LinkedHashSet<>(FOUND_MOB_FILES_CACHE_CAPACITY); Set missingMobFiles = new LinkedHashSet<>(MISSING_MOB_FILES_CACHE_CAPACITY); do { - Cell cell = scanner.getCell(); + ExtendedCell cell = scanner.getCell(); if (row != null && row.length != 0) { int result = CellComparator.getInstance().compareRows(cell, row, 0, row.length); if (result > 0) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java index 0134e11d8914..ce3ea3fd9414 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java @@ -23,7 +23,7 @@ import java.net.InetSocketAddress; import java.nio.channels.ClosedChannelException; import org.apache.hadoop.hbase.CallDroppedException; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.exceptions.TimeoutIOException; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; @@ -111,7 +111,7 @@ public void run() { } Throwable errorThrowable = null; String error = null; - Pair resultPair = null; + Pair resultPair = null; RpcServer.CurCall.set(call); final Span ipcServerSpan = new IpcServerSpanBuilder(call).build(); try (Scope ignored1 = ipcServerSpan.makeCurrent()) { @@ -156,7 +156,7 @@ public void run() { call.cleanup(); // Set the response Message param = resultPair != null ? resultPair.getFirst() : null; - CellScanner cells = resultPair != null ? resultPair.getSecond() : null; + ExtendedCellScanner cells = resultPair != null ? 
resultPair.getSecond() : null; call.setResponse(param, cells, errorThrowable, error); call.sendResponseIfReady(); // don't touch `span` here because its status and `end()` are managed in `call#setResponse()` diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerCall.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerCall.java index 4f0540da80a7..62cac989d8a7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerCall.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerCall.java @@ -19,7 +19,7 @@ import java.io.IOException; import java.net.InetAddress; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.ipc.RpcServer.CallCleanup; import org.apache.yetus.audience.InterfaceAudience; @@ -39,7 +39,7 @@ class NettyServerCall extends ServerCall { NettyServerCall(int id, BlockingService service, MethodDescriptor md, RequestHeader header, - Message param, CellScanner cellScanner, NettyServerRpcConnection connection, long size, + Message param, ExtendedCellScanner cellScanner, NettyServerRpcConnection connection, long size, InetAddress remoteAddress, long receiveTime, int timeout, ByteBuffAllocator bbAllocator, CellBlockBuilder cellBlockBuilder, CallCleanup reqCleanup) { super(id, service, md, header, param, cellScanner, connection, size, remoteAddress, receiveTime, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerRpcConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerRpcConnection.java index f52357539dec..f63b8d2730f7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerRpcConnection.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerRpcConnection.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.ipc.RpcServer.CallCleanup; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.nio.SingleByteBuff; @@ -107,7 +107,7 @@ public boolean isConnectionOpen() { @Override public NettyServerCall createCall(int id, final BlockingService service, - final MethodDescriptor md, RequestHeader header, Message param, CellScanner cellScanner, + final MethodDescriptor md, RequestHeader header, Message param, ExtendedCellScanner cellScanner, long size, final InetAddress remoteAddress, int timeout, CallCleanup reqCleanup) { return new NettyServerCall(id, service, md, header, param, cellScanner, this, size, remoteAddress, EnvironmentEdgeManager.currentTime(), timeout, this.rpcServer.bbAllocator, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcCall.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcCall.java index 2d06aa7c47af..804d7b32bb42 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcCall.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcCall.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.Map; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.yetus.audience.InterfaceAudience; import 
org.apache.yetus.audience.InterfaceStability; @@ -48,7 +48,7 @@ public interface RpcCall extends RpcCallContext { Message getParam(); /** Returns The CellScanner that can carry input and result payload. */ - CellScanner getCellScanner(); + ExtendedCellScanner getCellScanner(); /** Returns The timestamp when the call is constructed. */ long getReceiveTime(); @@ -117,7 +117,8 @@ public interface RpcCall extends RpcCallContext { * @param errorThrowable The error Throwable resulting from the call. * @param error Extra error message. */ - void setResponse(Message param, CellScanner cells, Throwable errorThrowable, String error); + void setResponse(Message param, ExtendedCellScanner cells, Throwable errorThrowable, + String error); /** * Send the response of this RPC call. Implementation provides the underlying facility diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index a84d132a0132..4ff1a0b54828 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.CallQueueTooBigException; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.conf.ConfigurationObserver; @@ -428,7 +429,7 @@ public void setSecretManager(SecretManager secretMana * the protobuf response. */ @Override - public Pair call(RpcCall call, MonitoredRPCHandler status) + public Pair call(RpcCall call, MonitoredRPCHandler status) throws IOException { try { MethodDescriptor md = call.getMethod(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java index 2c0dd1cc2b0e..9bf5fc3817dc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; import org.apache.hadoop.hbase.namequeues.NamedQueueRecorder; @@ -45,7 +45,8 @@ public interface RpcServerInterface { InetSocketAddress getListenerAddress(); - Pair call(RpcCall call, MonitoredRPCHandler status) throws IOException; + Pair call(RpcCall call, MonitoredRPCHandler status) + throws IOException; void setErrorHandler(HBaseRPCErrorHandler handler); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerCall.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerCall.java index 25d153c068aa..c2c6b4d063c2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerCall.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerCall.java @@ -30,8 +30,8 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCellScanner; import 
org.apache.hadoop.hbase.HBaseServerException; import org.apache.hadoop.hbase.exceptions.RegionMovedException; import org.apache.hadoop.hbase.io.ByteBuffAllocator; @@ -72,7 +72,7 @@ public abstract class ServerCall implements RpcCa protected final RequestHeader header; protected Message param; // the parameter passed // Optional cell data passed outside of protobufs. - protected final CellScanner cellScanner; + protected final ExtendedCellScanner cellScanner; protected final T connection; // connection to client protected final long receiveTime; // the time received when response is null // the time served when response is not null @@ -120,8 +120,8 @@ public abstract class ServerCall implements RpcCa @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "NP_NULL_ON_SOME_PATH", justification = "Can't figure why this complaint is happening... see below") ServerCall(int id, BlockingService service, MethodDescriptor md, RequestHeader header, - Message param, CellScanner cellScanner, T connection, long size, InetAddress remoteAddress, - long receiveTime, int timeout, ByteBuffAllocator byteBuffAllocator, + Message param, ExtendedCellScanner cellScanner, T connection, long size, + InetAddress remoteAddress, long receiveTime, int timeout, ByteBuffAllocator byteBuffAllocator, CellBlockBuilder cellBlockBuilder, CallCleanup reqCleanup) { this.id = id; this.service = service; @@ -273,7 +273,7 @@ public String toShortString() { } @Override - public synchronized void setResponse(Message m, final CellScanner cells, Throwable t, + public synchronized void setResponse(Message m, final ExtendedCellScanner cells, Throwable t, String errorMsg) { if (this.isError) { return; @@ -544,7 +544,7 @@ public Message getParam() { } @Override - public CellScanner getCellScanner() { + public ExtendedCellScanner getCellScanner() { return cellScanner; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java index 31f46f30c382..c17a8da90416 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcConnection.java @@ -39,8 +39,8 @@ import org.apache.commons.crypto.cipher.CryptoCipherFactory; import org.apache.commons.crypto.random.CryptoRandom; import org.apache.commons.crypto.random.CryptoRandomFactory; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.client.ConnectionRegistryEndpoint; import org.apache.hadoop.hbase.client.VersionInfoUtil; import org.apache.hadoop.hbase.codec.Codec; @@ -588,7 +588,7 @@ protected void processRequest(ByteBuff buf) throws IOException, InterruptedExcep } MethodDescriptor md = null; Message param = null; - CellScanner cellScanner = null; + ExtendedCellScanner cellScanner = null; try { if (header.hasRequestParam() && header.getRequestParam()) { md = this.service.getDescriptorForType().findMethodByName(header.getMethodName()); @@ -816,7 +816,7 @@ boolean isSimpleAuthentication() { public abstract boolean isConnectionOpen(); public abstract ServerCall createCall(int id, BlockingService service, MethodDescriptor md, - RequestHeader header, Message param, CellScanner cellScanner, long size, + RequestHeader header, Message param, ExtendedCellScanner cellScanner, long size, InetAddress remoteAddress, int timeout, CallCleanup reqCleanup); private 
static class ByteBuffByteInput extends ByteInput { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerCall.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerCall.java index 5c5e9102115c..aa6e2b7b4aca 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerCall.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerCall.java @@ -19,7 +19,7 @@ import java.io.IOException; import java.net.InetAddress; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.ipc.RpcServer.CallCleanup; import org.apache.yetus.audience.InterfaceAudience; @@ -43,7 +43,7 @@ class SimpleServerCall extends ServerCall { @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "NP_NULL_ON_SOME_PATH", justification = "Can't figure why this complaint is happening... see below") SimpleServerCall(int id, final BlockingService service, final MethodDescriptor md, - RequestHeader header, Message param, CellScanner cellScanner, + RequestHeader header, Message param, ExtendedCellScanner cellScanner, SimpleServerRpcConnection connection, long size, final InetAddress remoteAddress, long receiveTime, int timeout, ByteBuffAllocator bbAllocator, CellBlockBuilder cellBlockBuilder, CallCleanup reqCleanup, SimpleRpcServerResponder responder) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java index 1b28c19b4306..e8619b2eb7f5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleServerRpcConnection.java @@ -30,8 +30,8 @@ import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.client.VersionInfoUtil; import org.apache.hadoop.hbase.exceptions.RequestTooBigException; import org.apache.hadoop.hbase.ipc.RpcServer.CallCleanup; @@ -456,7 +456,7 @@ public boolean isConnectionOpen() { @Override public SimpleServerCall createCall(int id, BlockingService service, MethodDescriptor md, - RequestHeader header, Message param, CellScanner cellScanner, long size, + RequestHeader header, Message param, ExtendedCellScanner cellScanner, long size, InetAddress remoteAddress, int timeout, CallCleanup reqCleanup) { return new SimpleServerCall(id, service, md, header, param, cellScanner, this, size, remoteAddress, EnvironmentEdgeManager.currentTime(), timeout, this.rpcServer.bbAllocator, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java index 2fadc83340ed..66160dd4aa64 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java @@ -120,7 +120,7 @@ public static Date parseDate(String dateString) throws ParseException { * @param cell The current cell. * @return True if the cell has a mob reference tag, false if it doesn't. 
*/ - public static boolean isMobReferenceCell(Cell cell) { + public static boolean isMobReferenceCell(ExtendedCell cell) { if (cell.getTagsLength() > 0) { Optional tag = PrivateCellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE); if (tag.isPresent()) { @@ -135,7 +135,7 @@ public static boolean isMobReferenceCell(Cell cell) { * @param cell The current cell. * @return The table name tag. */ - private static Optional getTableNameTag(Cell cell) { + private static Optional getTableNameTag(ExtendedCell cell) { Optional tag = Optional.empty(); if (cell.getTagsLength() > 0) { tag = PrivateCellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE); @@ -148,7 +148,7 @@ private static Optional getTableNameTag(Cell cell) { * @param cell to extract tag from * @return table name as a string. empty if the tag is not found. */ - public static Optional getTableNameString(Cell cell) { + public static Optional getTableNameString(ExtendedCell cell) { Optional tag = getTableNameTag(cell); Optional name = Optional.empty(); if (tag.isPresent()) { @@ -162,7 +162,7 @@ public static Optional getTableNameString(Cell cell) { * @param cell to extract tag from * @return name of table as a TableName. empty if the tag is not found. */ - public static Optional getTableName(Cell cell) { + public static Optional getTableName(ExtendedCell cell) { Optional maybe = getTableNameTag(cell); Optional name = Optional.empty(); if (maybe.isPresent()) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtobufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtobufUtil.java index 6754fdef08ab..0b1736e21a46 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtobufUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtobufUtil.java @@ -24,10 +24,11 @@ import java.util.concurrent.CompletableFuture; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.client.AsyncRegionServerAdmin; -import org.apache.hadoop.hbase.io.SizedCellScanner; +import org.apache.hadoop.hbase.io.SizedExtendedCellScanner; import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WAL.Entry; @@ -56,8 +57,8 @@ public class ReplicationProtobufUtil { public static CompletableFuture replicateWALEntry( AsyncRegionServerAdmin admin, Entry[] entries, String replicationClusterId, Path sourceBaseNamespaceDir, Path sourceHFileArchiveDir, int timeout) { - Pair p = buildReplicateWALEntryRequest(entries, null, - replicationClusterId, sourceBaseNamespaceDir, sourceHFileArchiveDir); + Pair p = buildReplicateWALEntryRequest(entries, + null, replicationClusterId, sourceBaseNamespaceDir, sourceHFileArchiveDir); return admin.replicateWALEntry(p.getFirst(), p.getSecond(), timeout); } @@ -66,7 +67,7 @@ public static CompletableFuture replicateWALEntry( * @param entries the WAL entries to be replicated * @return a pair of ReplicateWALEntryRequest and a CellScanner over all the WALEdit values found. 
*/ - public static Pair + public static Pair buildReplicateWALEntryRequest(final Entry[] entries) { return buildReplicateWALEntryRequest(entries, null, null, null, null); } @@ -81,11 +82,11 @@ public static CompletableFuture replicateWALEntry( * @param sourceHFileArchiveDir Path to the source cluster hfile archive directory * @return a pair of ReplicateWALEntryRequest and a CellScanner over all the WALEdit values found. */ - public static Pair buildReplicateWALEntryRequest( + public static Pair buildReplicateWALEntryRequest( final Entry[] entries, byte[] encodedRegionName, String replicationClusterId, Path sourceBaseNamespaceDir, Path sourceHFileArchiveDir) { // Accumulate all the Cells seen in here. - List> allCells = new ArrayList<>(entries.length); + List> allCells = new ArrayList<>(entries.length); int size = 0; WALEntry.Builder entryBuilder = WALEntry.newBuilder(); ReplicateWALEntryRequest.Builder builder = ReplicateWALEntryRequest.newBuilder(); @@ -104,7 +105,8 @@ public static Pair buildReplicateWALEntry } entryBuilder.setKey(keyBuilder.build()); WALEdit edit = entry.getEdit(); - List cells = edit.getCells(); + // TODO: avoid this cast + List cells = (List) edit.getCells(); // Add up the size. It is used later serializing out the kvs. for (Cell cell : cells) { size += PrivateCellUtil.estimatedSerializedSizeOf(cell); @@ -130,21 +132,24 @@ public static Pair buildReplicateWALEntry } /** Returns cells packaged as a CellScanner */ - static CellScanner getCellScanner(final List> cells, final int size) { - return new SizedCellScanner() { - private final Iterator> entries = cells.iterator(); - private Iterator currentIterator = null; - private Cell currentCell; + static ExtendedCellScanner getCellScanner(final List> cells, + final int size) { + return new SizedExtendedCellScanner() { + private final Iterator> entries = cells.iterator(); + private Iterator currentIterator = null; + private ExtendedCell currentCell; @Override - public Cell current() { + public ExtendedCell current() { return this.currentCell; } @Override public boolean advance() { if (this.currentIterator == null) { - if (!this.entries.hasNext()) return false; + if (!this.entries.hasNext()) { + return false; + } this.currentIterator = this.entries.next().iterator(); } if (this.currentIterator.hasNext()) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 2792ab2754cd..68f5356f5549 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -112,6 +112,7 @@ import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.Mutation; +import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.RegionReplicaUtil; @@ -3512,7 +3513,7 @@ protected void checkAndPrepareMutation(int index, long timestamp) throws IOExcep // store the family map reference to allow for mutations // we know that in mutation, only ExtendedCells are allow so here we do a fake cast, to // simplify later logic - familyCellMaps[index] = (Map) mutation.getFamilyCellMap(); + familyCellMaps[index] = PackagePrivateFieldAccessor.getExtendedFamilyCellMap(mutation); } // store durability for the batch (highest durability 
of all operations in the batch) @@ -4063,7 +4064,8 @@ private Map> reckonDeltas(Mutation mutation, assert mutation instanceof Increment || mutation instanceof Append; Map> ret = new TreeMap<>(Bytes.BYTES_COMPARATOR); // Process a Store/family at a time. - for (Map.Entry> entry : mutation.getFamilyCellMap().entrySet()) { + for (Map.Entry> entry : PackagePrivateFieldAccessor + .getExtendedFamilyCellMap(mutation).entrySet()) { final byte[] columnFamilyName = entry.getKey(); List deltas = (List) entry.getValue(); // Reckon for the Store what to apply to WAL and MemStore. @@ -4184,9 +4186,9 @@ private List reckonDeltasByStore(HStore store, Mutation mutation, return cellPairs.stream().map(Pair::getSecond).collect(Collectors.toList()); } - private static ExtendedCell reckonDelta(final Cell delta, final Cell currentCell, - final byte[] columnFamily, final long now, Mutation mutation, Function supplier) - throws IOException { + private static ExtendedCell reckonDelta(final ExtendedCell delta, + final ExtendedCell currentCell, final byte[] columnFamily, final long now, Mutation mutation, + Function supplier) throws IOException { // Forward any tags found on the delta. List tags = TagUtil.carryForwardTags(delta); if (currentCell != null) { @@ -4204,7 +4206,6 @@ private static ExtendedCell reckonDelta(final Cell delta, final Cell currentCell } else { tags = TagUtil.carryForwardTTLTag(tags, mutation.getTTL()); PrivateCellUtil.updateLatestStamp(delta, now); - assert delta instanceof ExtendedCell; ExtendedCell deltaCell = (ExtendedCell) delta; return CollectionUtils.isEmpty(tags) ? deltaCell @@ -4522,7 +4523,8 @@ private void checkAndMergeCPMutations(final MiniBatchOperationInProgress> cpFamilyMap = (Map) cpMutation.getFamilyCellMap(); + Map> cpFamilyMap = + PackagePrivateFieldAccessor.getExtendedFamilyCellMap(cpMutation); region.rewriteCellTags(cpFamilyMap, mutation); // will get added to the memStore later mergeFamilyMaps(familyCellMaps[i], cpFamilyMap); @@ -5094,14 +5096,16 @@ private CheckAndMutateResult checkAndMutateInternal(CheckAndMutate checkAndMutat byte[] byteTs = Bytes.toBytes(ts); if (mutation != null) { if (mutation instanceof Put) { - updateCellTimestamps((Iterable) mutation.getFamilyCellMap().values(), byteTs); + updateCellTimestamps( + PackagePrivateFieldAccessor.getExtendedFamilyCellMap(mutation).values(), byteTs); } // And else 'delete' is not needed since it already does a second get, and sets the // timestamp from get (see prepareDeleteTimestamps). 
} else { for (Mutation m : rowMutations.getMutations()) { if (m instanceof Put) { - updateCellTimestamps((Iterable) m.getFamilyCellMap().values(), byteTs); + updateCellTimestamps( + PackagePrivateFieldAccessor.getExtendedFamilyCellMap(m).values(), byteTs); } } // And else 'delete' is not needed since it already does a second get, and sets the diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MobReferenceOnlyFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MobReferenceOnlyFilter.java index 4efc29dea100..f3191c6374d8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MobReferenceOnlyFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MobReferenceOnlyFilter.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.filter.FilterBase; import org.apache.hadoop.hbase.mob.MobUtils; import org.apache.yetus.audience.InterfaceAudience; @@ -30,9 +31,9 @@ class MobReferenceOnlyFilter extends FilterBase { @Override public ReturnCode filterCell(final Cell cell) { - if (null != cell) { + if (null != cell && cell instanceof ExtendedCell) { // If a cell with a mob reference tag, it's included. - if (MobUtils.isMobReferenceCell(cell)) { + if (MobUtils.isMobReferenceCell((ExtendedCell) cell)) { return ReturnCode.INCLUDE; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 66c97fb9401f..47011c5b5520 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -47,11 +47,12 @@ import org.apache.hadoop.hbase.CacheEvictionStats; import org.apache.hadoop.hbase.CacheEvictionStatsBuilder; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.DroppedSnapshotException; +import org.apache.hadoop.hbase.ExtendedCellScannable; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HBaseRpcServicesBase; import org.apache.hadoop.hbase.HConstants; @@ -72,6 +73,7 @@ import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.OperationWithAttributes; +import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.RegionReplicaUtil; @@ -581,7 +583,7 @@ private void addResults(ScanResponse.Builder builder, List results, builder.addCellsPerResult(res.size()); builder.addPartialFlagPerResult(res.mayHaveMoreCellsInRow()); } - controller.setCellScanner(CellUtil.createCellScanner(results)); + controller.setCellScanner(PrivateCellUtil.createExtendedCellScanner(results)); } else { for (Result res : results) { ClientProtos.Result pbr = ProtobufUtil.toResult(res); @@ -725,10 +727,10 @@ private Result increment(final HRegion region, final OperationQuota quota, * @param context the current RpcCallContext * @return Return the cellScanner passed */ - private List 
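MobReferenceOnlyFilter now narrows the cell only after a runtime type check, since filterCell still receives the wider Cell type. The guard-then-cast shape in isolation, with simplified stand-in types; the real check inspects a mob reference tag via MobUtils:

    // Minimal sketch of the instanceof guard used in MobReferenceOnlyFilter:
    // only narrow the reference when the runtime type supports the richer API.
    interface Cell3 {}
    interface ExtendedCell3 extends Cell3 { int getTagsLength(); }

    final class ReferenceOnlyFilterSketch {
      enum ReturnCode { INCLUDE, SKIP }

      static boolean isReferenceCell(ExtendedCell3 cell) {
        // stand-in for MobUtils.isMobReferenceCell: real code inspects a tag on the cell
        return cell.getTagsLength() > 0;
      }

      ReturnCode filterCell(Cell3 cell) {
        if (cell instanceof ExtendedCell3 && isReferenceCell((ExtendedCell3) cell)) {
          return ReturnCode.INCLUDE; // keep only cells carrying the reference tag
        }
        return ReturnCode.SKIP;
      }
    }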
doNonAtomicRegionMutation(final HRegion region, + private List doNonAtomicRegionMutation(final HRegion region, final OperationQuota quota, final RegionAction actions, final CellScanner cellScanner, - final RegionActionResult.Builder builder, List cellsToReturn, long nonceGroup, - final RegionScannersCloseCallBack closeCallBack, RpcCallContext context, + final RegionActionResult.Builder builder, List cellsToReturn, + long nonceGroup, final RegionScannersCloseCallBack closeCallBack, RpcCallContext context, ActivePolicyEnforcement spaceQuotaEnforcement) { // Gather up CONTIGUOUS Puts and Deletes in this mutations List. Idea is that rather than do // one at a time, we instead pass them in batch. Be aware that the corresponding @@ -2053,9 +2055,9 @@ public WarmupRegionResponse warmupRegion(final RpcController controller, return response; } - private CellScanner getAndReset(RpcController controller) { + private ExtendedCellScanner getAndReset(RpcController controller) { HBaseRpcController hrc = (HBaseRpcController) controller; - CellScanner cells = hrc.cellScanner(); + ExtendedCellScanner cells = hrc.cellScanner(); hrc.setCellScanner(null); return cells; } @@ -2498,8 +2500,8 @@ public GetResponse get(final RpcController controller, final GetRequest request) && VersionInfoUtil.hasMinimumVersion(context.getClientVersionInfo(), 1, 3) ) { pbr = ProtobufUtil.toResultNoData(r); - ((HBaseRpcController) controller) - .setCellScanner(CellUtil.createCellScanner(r.rawCells())); + ((HBaseRpcController) controller).setCellScanner(PrivateCellUtil + .createExtendedCellScanner(PackagePrivateFieldAccessor.getExtendedRawCells(r))); addSize(context, r); } else { pbr = ProtobufUtil.toResult(r); @@ -2648,10 +2650,7 @@ public MultiResponse multi(final RpcController rpcc, final MultiRequest request) // rpc controller is how we bring in data via the back door; it is unprotobuf'ed data. // It is also the conduit via which we pass back data. HBaseRpcController controller = (HBaseRpcController) rpcc; - CellScanner cellScanner = controller != null ? controller.cellScanner() : null; - if (controller != null) { - controller.setCellScanner(null); - } + CellScanner cellScanner = controller != null ? getAndReset(controller) : null; long nonceGroup = request.hasNonceGroup() ? request.getNonceGroup() : HConstants.NO_NONCE; @@ -2732,7 +2731,7 @@ public MultiResponse multi(final RpcController rpcc, final MultiRequest request) } // this will contain all the cells that we need to return. It's created later, if needed. - List cellsToReturn = null; + List cellsToReturn = null; RegionScannersCloseCallBack closeCallBack = null; RpcCallContext context = RpcServer.getCurrentCall().orElse(null); Map regionStats = @@ -2858,7 +2857,7 @@ public MultiResponse multi(final RpcController rpcc, final MultiRequest request) } // Load the controller with the Cells to return. 
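multi() now routes through the same getAndReset helper as the other entry points, so the scanner is detached from the controller exactly once instead of being read and cleared inline. The detach-and-clear idiom in isolation (a plain Object stands in for ExtendedCellScanner):

    // Sketch of the idiom behind getAndReset: take ownership of the controller's
    // cell scanner and null the field so it cannot be consumed twice.
    final class ControllerSketch {
      private Object cellScanner; // stand-in for the scanner field

      Object cellScanner() { return cellScanner; }
      void setCellScanner(Object scanner) { this.cellScanner = scanner; }

      static Object getAndReset(ControllerSketch controller) {
        Object cells = controller.cellScanner();
        controller.setCellScanner(null); // the caller now owns the scanner
        return cells;
      }
    }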
if (cellsToReturn != null && !cellsToReturn.isEmpty() && controller != null) { - controller.setCellScanner(CellUtil.createCellScanner(cellsToReturn)); + controller.setCellScanner(PrivateCellUtil.createExtendedCellScanner(cellsToReturn)); } MultiRegionLoadStats.Builder builder = MultiRegionLoadStats.newBuilder(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedMobStoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedMobStoreScanner.java index 505cd5dedcee..81a4cc467f98 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedMobStoreScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReversedMobStoreScanner.java @@ -75,7 +75,7 @@ public boolean next(List outResult, ScannerContext ctx) throws IOException for (int i = 0; i < outResult.size(); i++) { Cell cell = outResult.get(i); assert cell instanceof ExtendedCell; - if (MobUtils.isMobReferenceCell(cell)) { + if (MobUtils.isMobReferenceCell((ExtendedCell) cell)) { MobCell mobCell = mobStore.resolve((ExtendedCell) cell, cacheMobBlocks, readPt, readEmptyValueOnMobCellMiss); mobKVCount++; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java index bb874a001d2f..dff2052efe78 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java @@ -31,6 +31,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.exceptions.TimeoutIOException; import org.apache.hadoop.hbase.io.ByteBufferWriter; @@ -139,7 +140,7 @@ public void append(Entry entry) { } try { for (Cell cell : entry.getEdit().getCells()) { - cellEncoder.write(cell); + cellEncoder.write((ExtendedCell) cell); } } catch (IOException e) { throw new AssertionError("should not happen", e); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java index 52317949cc83..6e6727db085d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java @@ -26,6 +26,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.StreamCapabilities; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor; import org.apache.hadoop.hbase.util.AtomicUtils; import org.apache.hadoop.hbase.util.CommonFSUtils; @@ -58,7 +59,7 @@ public void append(Entry entry) throws IOException { .writeDelimitedTo(output); for (Cell cell : entry.getEdit().getCells()) { // cellEncoder must assume little about the stream, since we write PB and cells in turn. 
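Both WAL writers below cast each edit cell before handing it to the now ExtendedCell-typed encoder. A sketch of that append loop with stand-in types; the cast is only sound under the invariant the patch relies on, that a WALEdit never holds anything but ExtendedCells:

    import java.io.IOException;
    import java.util.List;

    // Stand-in types for the WAL append loop after the write(ExtendedCell) change.
    interface CellW {}
    interface ExtendedCellW extends CellW {}
    interface EncoderW { void write(ExtendedCellW cell) throws IOException; }

    final class WalAppendSketch {
      static void append(List<CellW> editCells, EncoderW cellEncoder) throws IOException {
        for (CellW cell : editCells) {
          cellEncoder.write((ExtendedCellW) cell); // invariant: always an ExtendedCellW
        }
      }
    }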
- cellEncoder.write(cell); + cellEncoder.write((ExtendedCell) cell); } length.set(output.getPos()); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java index 754368f73f3c..ec47283469c9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java @@ -24,7 +24,6 @@ import java.io.OutputStream; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -79,7 +78,7 @@ public EncryptedKvDecoder(InputStream in, Decryptor decryptor) { } @Override - protected Cell parseCell() throws IOException { + protected ExtendedCell parseCell() throws IOException { if (this.decryptor == null) { return super.parseCell(); } @@ -174,7 +173,7 @@ public EncryptedKvEncoder(OutputStream os, Encryptor encryptor) { } @Override - public void write(Cell c) throws IOException { + public void write(ExtendedCell c) throws IOException { if (encryptor == null) { super.write(c); return; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java index e6a20b0d0206..87154a62066c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java @@ -233,9 +233,7 @@ public CompressedKvEncoder(OutputStream out, CompressionContext compression) { } @Override - public void write(Cell c) throws IOException { - assert c instanceof ExtendedCell; - ExtendedCell cell = (ExtendedCell) c; + public void write(ExtendedCell cell) throws IOException { // We first write the KeyValue infrastructure as VInts. 
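EncryptedKvEncoder keeps the same override-with-fallback shape after the signature change: accept the narrower ExtendedCell, and delegate to the plain superclass path when no encryptor is configured. A compressed sketch, with a hypothetical Encryptor stand-in:

    import java.io.IOException;

    // Sketch of the "override, but delegate when the feature is off" shape.
    interface ExtendedCellS {}

    class PlainEncoder {
      void write(ExtendedCellS c) throws IOException {
        // plain serialization path
      }
    }

    final class EncryptedEncoder extends PlainEncoder {
      interface Encryptor { byte[] encrypt(byte[] plaintext); }

      private final Encryptor encryptor; // may be null when encryption is disabled

      EncryptedEncoder(Encryptor encryptor) { this.encryptor = encryptor; }

      @Override
      void write(ExtendedCellS c) throws IOException {
        if (encryptor == null) {
          super.write(c); // feature off: behave exactly like the plain encoder
          return;
        }
        // ... serialize c, then encrypt the payload with encryptor.encrypt(...) ...
      }
    }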
StreamUtils.writeRawVInt32(out, KeyValueUtil.keyLength(cell)); StreamUtils.writeRawVInt32(out, cell.getValueLength()); @@ -290,7 +288,7 @@ public CompressedKvDecoder(InputStream in, CompressionContext compression) { } @Override - protected Cell parseCell() throws IOException { + protected ExtendedCell parseCell() throws IOException { int keylength = StreamUtils.readRawVarint32(in); int vlength = StreamUtils.readRawVarint32(in); int tagsLength = StreamUtils.readRawVarint32(in); @@ -396,7 +394,7 @@ public EnsureKvEncoder(OutputStream out) { } @Override - public void write(Cell cell) throws IOException { + public void write(ExtendedCell cell) throws IOException { checkFlushed(); // Make sure to write tags into WAL ByteBufferUtils.putInt(this.out, KeyValueUtil.getSerializedSize(cell, true)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplaySyncReplicationWALCallable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplaySyncReplicationWALCallable.java index a60186c13dc7..427fe80b0c36 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplaySyncReplicationWALCallable.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplaySyncReplicationWALCallable.java @@ -23,8 +23,8 @@ import java.util.List; import java.util.concurrent.locks.Lock; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.executor.EventType; import org.apache.hadoop.hbase.procedure2.BaseRSProcedureCallable; import org.apache.hadoop.hbase.protobuf.ReplicationProtobufUtil; @@ -106,8 +106,9 @@ private void replayWAL(String wal) throws IOException { try { List entries = readWALEntries(reader, wal); while (!entries.isEmpty()) { - Pair pair = ReplicationProtobufUtil - .buildReplicateWALEntryRequest(entries.toArray(new Entry[entries.size()])); + Pair pair = + ReplicationProtobufUtil + .buildReplicateWALEntryRequest(entries.toArray(new Entry[entries.size()])); ReplicateWALEntryRequest request = pair.getFirst(); rs.getReplicationSinkService().replicateLogEntries(request.getEntryList(), pair.getSecond(), request.getReplicationClusterId(), request.getSourceBaseNamespaceDirPath(), diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index d0c19d7cfcd5..f0158f299f22 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -35,13 +35,13 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.CompoundConfiguration; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -644,7 +644,7 @@ private void checkForReservedTagPresence(User user, 
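CompressedKvEncoder.write starts by framing the cell with three raw VInt32s: key length, value length, and tags length. A self-contained illustration of that varint framing; writeRawVInt32 is reimplemented locally in the spirit of StreamUtils and is not the real helper:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    // Unsigned LEB128-style varints: 7 payload bits per byte, high bit = continuation.
    final class VIntFramingSketch {
      static void writeRawVInt32(OutputStream out, int value) throws IOException {
        while ((value & ~0x7F) != 0) {
          out.write((value & 0x7F) | 0x80); // low 7 bits, continuation bit set
          value >>>= 7;
        }
        out.write(value); // final byte, continuation bit clear
      }

      static byte[] frame(int keyLength, int valueLength, int tagsLength) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writeRawVInt32(out, keyLength);
        writeRawVInt32(out, valueLength);
        writeRawVInt32(out, tagsLength);
        return out.toByteArray();
      }
    }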
Mutation m) throws IOExcepti if (m.getAttribute(TAG_CHECK_PASSED) != null) { return; } - for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) { + for (ExtendedCellScanner cellScanner = m.cellScanner(); cellScanner.advance();) { Iterator tagsItr = PrivateCellUtil.tagsIterator(cellScanner.current()); while (tagsItr.hasNext()) { if (tagsItr.next().getType() == PermissionStorage.ACL_TAG_TYPE) { @@ -1732,8 +1732,9 @@ private Cell createNewCellWithTags(Mutation mutation, Cell oldCell, Cell newCell // there is no need to rewrite them again. Just extract non-acl tags of newCell if we need to // add a new acl tag for the cell. Actually, oldCell is useless here. List tags = Lists.newArrayList(); - if (newCell != null) { - Iterator tagIterator = PrivateCellUtil.tagsIterator(newCell); + ExtendedCell newExtendedCell = (ExtendedCell) newCell; + if (newExtendedCell != null) { + Iterator tagIterator = PrivateCellUtil.tagsIterator(newExtendedCell); while (tagIterator.hasNext()) { Tag tag = tagIterator.next(); if (tag.getType() != PermissionStorage.ACL_TAG_TYPE) { @@ -1750,8 +1751,7 @@ private Cell createNewCellWithTags(Mutation mutation, Cell oldCell, Cell newCell // We have checked the ACL tag of mutation is not null. // So that the tags could not be empty. tags.add(new ArrayBackedTag(PermissionStorage.ACL_TAG_TYPE, mutation.getACL())); - assert newCell instanceof ExtendedCell; - return PrivateCellUtil.createCell((ExtendedCell) newCell, tags); + return PrivateCellUtil.createCell(newExtendedCell, tags); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthManager.java index 023eccbd27d3..c27063752c58 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthManager.java @@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.security.Superusers; @@ -452,7 +453,9 @@ private boolean authorizeFamily(Set permissions, TableName tabl */ public boolean authorizeCell(User user, TableName table, Cell cell, Permission.Action action) { try { - List perms = PermissionStorage.getCellPermissionsForUser(user, cell); + assert cell instanceof ExtendedCell; + List perms = + PermissionStorage.getCellPermissionsForUser(user, (ExtendedCell) cell); if (LOG.isTraceEnabled()) { LOG.trace("Perms for user {} in table {} in cell {}: {}", user.getShortName(), table, cell, (perms != null ? 
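checkForReservedTagPresence walks every tag of every cell in the mutation and rejects the request if the reserved ACL tag type appears. The scan reduced to its essentials, with stand-in Tag and cell types and an illustrative tag-type constant (not the real value):

    import java.util.Iterator;
    import java.util.List;

    final class ReservedTagScanSketch {
      static final byte ACL_TAG_TYPE = (byte) 0xfd; // illustrative, not the real constant

      interface Tag { byte getType(); }
      interface CellWithTags { Iterator<Tag> tagsIterator(); }

      /** Returns true if any cell carries the reserved ACL tag, i.e. must be rejected. */
      static boolean hasReservedTag(List<CellWithTags> cells) {
        for (CellWithTags cell : cells) {
          Iterator<Tag> tags = cell.tagsIterator();
          while (tags.hasNext()) {
            if (tags.next().getType() == ACL_TAG_TYPE) {
              return true;
            }
          }
        }
        return false;
      }
    }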
perms : "")); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/PermissionStorage.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/PermissionStorage.java index 7a4444291017..b66c0ed0b099 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/PermissionStorage.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/PermissionStorage.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; @@ -866,7 +867,7 @@ public static byte[] fromNamespaceEntry(byte[] namespace) { return Arrays.copyOfRange(namespace, 1, namespace.length); } - public static List getCellPermissionsForUser(User user, Cell cell) + public static List getCellPermissionsForUser(User user, ExtendedCell cell) throws IOException { // Save an object allocation where we can if (cell.getTagsLength() == 0) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCustomPriorityRpcControllerFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCustomPriorityRpcControllerFactory.java index fde491983cea..435037ab17d2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCustomPriorityRpcControllerFactory.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestCustomPriorityRpcControllerFactory.java @@ -281,12 +281,12 @@ public HBaseRpcController newController() { } @Override - public HBaseRpcController newController(CellScanner cellScanner) { + public HBaseRpcController newController(ExtendedCellScanner cellScanner) { return new PriorityController(EXPECTED_PRIORITY.get(), super.newController(cellScanner)); } @Override - public HBaseRpcController newController(List cellIterables) { + public HBaseRpcController newController(List cellIterables) { return new PriorityController(EXPECTED_PRIORITY.get(), super.newController(cellIterables)); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java index 8e5895c6914e..2e4a13fc8ce4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -232,7 +233,7 @@ public void testKeepDeletedCells() throws Exception { s.setTimeRange(0, ts + 3); s.readAllVersions(); ResultScanner scanner = h.getScanner(s); - Cell[] kvs = scanner.next().rawCells(); + ExtendedCell[] kvs = scanner.next().rawExtendedCells(); assertArrayEquals(T2, CellUtil.cloneValue(kvs[0])); assertArrayEquals(T1, CellUtil.cloneValue(kvs[1])); scanner.close(); @@ -241,7 +242,7 @@ public void testKeepDeletedCells() throws Exception { s.setRaw(true); s.readAllVersions(); scanner = h.getScanner(s); - kvs = scanner.next().rawCells(); + kvs = scanner.next().rawExtendedCells(); assertTrue(PrivateCellUtil.isDeleteFamily(kvs[0])); 
assertArrayEquals(T3, CellUtil.cloneValue(kvs[1])); assertTrue(CellUtil.isDelete(kvs[2])); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java index f2b0f75724d8..be70c4c5cd64 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; @@ -566,8 +567,8 @@ public void testIncrementWithTtlTags() throws Exception { int count = 0; Result result; while ((result = scanner.next()) != null) { - Cell[] cells = result.rawCells(); - for (Cell cell : cells) { + ExtendedCell[] cells = result.rawExtendedCells(); + for (ExtendedCell cell : cells) { List tags = PrivateCellUtil.getTags(cell); // Make sure there is only 1 tag. assertEquals(1, tags.size()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMalformedCellFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMalformedCellFromClient.java index 64f4c89a34fa..37422c1f1a0f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMalformedCellFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMalformedCellFromClient.java @@ -32,9 +32,11 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.regionserver.HRegion; @@ -199,14 +201,15 @@ public void testAtomicOperations() throws Exception { ProtobufUtil.toMutationNoData(ClientProtos.MutationProto.MutationType.PUT, put)))) .build(); - List cells = new ArrayList<>(); + List cells = new ArrayList<>(); for (Mutation m : rm.getMutations()) { cells.addAll(m.getCellList(FAMILY)); } cells.addAll(put.getCellList(FAMILY)); assertEquals(3, cells.size()); HBaseRpcController controller = Mockito.mock(HBaseRpcController.class); - Mockito.when(controller.cellScanner()).thenReturn(CellUtil.createCellScanner(cells)); + Mockito.when(controller.cellScanner()) + .thenReturn(PrivateCellUtil.createExtendedCellScanner(cells)); HRegionServer rs = TEST_UTIL.getMiniHBaseCluster().getRegionServer(TEST_UTIL .getMiniHBaseCluster().getServerHoldingRegion(TABLE_NAME, r.getRegionInfo().getRegionName())); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java index 272bf1c46096..2481cb200d80 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java @@ -24,7 +24,8 @@ import java.io.ByteArrayOutputStream; import 
java.io.IOException; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.CellOutputStream; import org.apache.hadoop.hbase.util.Bytes; @@ -42,8 +43,8 @@ public class CodecPerformance { @Deprecated public static final Logger LOG = LoggerFactory.getLogger(CodecPerformance.class); - static Cell[] getCells(final int howMany) { - Cell[] cells = new Cell[howMany]; + static ExtendedCell[] getCells(final int howMany) { + ExtendedCell[] cells = new ExtendedCell[howMany]; for (int i = 0; i < howMany; i++) { byte[] index = Bytes.toBytes(i); KeyValue kv = new KeyValue(index, Bytes.toBytes("f"), index, index); @@ -62,7 +63,7 @@ static int getRoughSize(final Cell[] cells) { } static byte[] runEncoderTest(final int index, final int initialBufferSize, - final ByteArrayOutputStream baos, final CellOutputStream encoder, final Cell[] cells) + final ByteArrayOutputStream baos, final CellOutputStream encoder, final ExtendedCell[] cells) throws IOException { long startTime = EnvironmentEdgeManager.currentTime(); for (int i = 0; i < cells.length; i++) { @@ -76,9 +77,9 @@ static byte[] runEncoderTest(final int index, final int initialBufferSize, return baos.toByteArray(); } - static Cell[] runDecoderTest(final int index, final int count, final CellScanner decoder) - throws IOException { - Cell[] cells = new Cell[count]; + static ExtendedCell[] runDecoderTest(final int index, final int count, + final ExtendedCellScanner decoder) throws IOException { + ExtendedCell[] cells = new ExtendedCell[count]; long startTime = EnvironmentEdgeManager.currentTime(); for (int i = 0; decoder.advance(); i++) { cells[i] = decoder.current(); @@ -94,10 +95,10 @@ static void verifyCells(final Cell[] input, final Cell[] output) { assertArrayEquals(input, output); } - static void doCodec(final Codec codec, final Cell[] cells, final int cycles, final int count, - final int initialBufferSize) throws IOException { + static void doCodec(final Codec codec, final ExtendedCell[] cells, final int cycles, + final int count, final int initialBufferSize) throws IOException { byte[] bytes = null; - Cell[] cellsDecoded = null; + ExtendedCell[] cellsDecoded = null; for (int i = 0; i < cycles; i++) { ByteArrayOutputStream baos = new ByteArrayOutputStream(initialBufferSize); Codec.Encoder encoder = codec.getEncoder(baos); @@ -117,7 +118,7 @@ public static void main(String[] args) throws IOException { // How many times to do an operation; repeat gives hotspot chance to warm up. 
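CodecPerformance encodes a fixed cell array, then decodes the resulting buffer and compares the output against the input. A compact, runnable sketch of such a round-trip harness; the single-byte length framing is a toy codec invented here so the example stands alone:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.Arrays;

    final class RoundTripSketch {
      static byte[] encode(byte[][] cells) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        for (byte[] cell : cells) {
          out.write(cell.length); // toy framing: single-byte length (< 128)
          out.write(cell);
        }
        return out.toByteArray();
      }

      static byte[][] decode(byte[] bytes, int count) throws IOException {
        ByteArrayInputStream in = new ByteArrayInputStream(bytes);
        byte[][] cells = new byte[count][];
        for (int i = 0; i < count; i++) {
          byte[] cell = new byte[in.read()];
          in.read(cell);
          cells[i] = cell;
        }
        return cells;
      }

      public static void main(String[] args) throws IOException {
        byte[][] cells = { "a".getBytes(), "bb".getBytes(), "ccc".getBytes() };
        long start = System.nanoTime();
        byte[] encoded = encode(cells);
        byte[][] decoded = decode(encoded, cells.length);
        System.out.println("round trip ok=" + Arrays.deepEquals(cells, decoded)
          + " in " + (System.nanoTime() - start) + " ns");
      }
    }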
final int cycles = 30; - Cell[] cells = getCells(count); + ExtendedCell[] cells = getCells(count); int size = getRoughSize(cells); int initialBufferSize = 2 * size; // Multiply by 2 to ensure we don't have to grow buffer diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java index d580c3d7ff18..657f770dd32a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java @@ -26,10 +26,10 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; @@ -116,14 +116,14 @@ public void testThree() throws IOException { DataInputStream dis = new DataInputStream(cis); Codec.Decoder decoder = cmc.getDecoder(dis); assertTrue(decoder.advance()); - Cell c = decoder.current(); - assertTrue(CellUtil.equals(c, kv1)); + ExtendedCell c = decoder.current(); + assertTrue(PrivateCellUtil.equals(c, kv1)); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellUtil.equals(c, kv2)); + assertTrue(PrivateCellUtil.equals(c, kv2)); assertTrue(decoder.advance()); c = decoder.current(); - assertTrue(CellUtil.equals(c, kv3)); + assertTrue(PrivateCellUtil.equals(c, kv3)); assertFalse(decoder.advance()); dis.close(); assertEquals(offset, cis.getCount()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestPostIncrementAndAppendBeforeWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestPostIncrementAndAppendBeforeWAL.java index 406618295ca0..64f47e1d1bac 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestPostIncrementAndAppendBeforeWAL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestPostIncrementAndAppendBeforeWAL.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; @@ -247,7 +248,7 @@ public void testAppendTTLWithACLTag() throws Exception { } } - private static boolean checkAclTag(byte[] acl, Cell cell) { + private static boolean checkAclTag(byte[] acl, ExtendedCell cell) { Iterator iter = PrivateCellUtil.tagsIterator(cell); while (iter.hasNext()) { Tag tag = iter.next(); @@ -342,7 +343,10 @@ public List> postIncrementBeforeWAL( List> cellPairs) throws IOException { List> result = super.postIncrementBeforeWAL(ctx, mutation, cellPairs); for (Pair pair : result) { - if (mutation.getACL() != null && !checkAclTag(mutation.getACL(), pair.getSecond())) { + if ( + mutation.getACL() != null + && !checkAclTag(mutation.getACL(), (ExtendedCell) pair.getSecond()) + ) { throw new DoNotRetryIOException("Unmatched ACL tag."); } } @@ -355,7 +359,10 @@ public List> postAppendBeforeWAL( List> 
cellPairs) throws IOException { List> result = super.postAppendBeforeWAL(ctx, mutation, cellPairs); for (Pair pair : result) { - if (mutation.getACL() != null && !checkAclTag(mutation.getACL(), pair.getSecond())) { + if ( + mutation.getACL() != null + && !checkAclTag(mutation.getACL(), (ExtendedCell) pair.getSecond()) + ) { throw new DoNotRetryIOException("Unmatched ACL tag."); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java index f64381a8a22e..26144d4adea4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -554,12 +555,14 @@ Lists. newArrayList( // Value for fam:qual1 should be stripped: assertEquals(Filter.ReturnCode.INCLUDE, flist.filterCell(kvQual1)); - final KeyValue transformedQual1 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual1)); + final KeyValue transformedQual1 = + KeyValueUtil.ensureKeyValue((ExtendedCell) flist.transformCell(kvQual1)); assertEquals(0, transformedQual1.getValueLength()); // Value for fam:qual2 should not be stripped: assertEquals(Filter.ReturnCode.INCLUDE, flist.filterCell(kvQual2)); - final KeyValue transformedQual2 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual2)); + final KeyValue transformedQual2 = + KeyValueUtil.ensureKeyValue((ExtendedCell) flist.transformCell(kvQual2)); assertEquals("value", Bytes.toString(transformedQual2.getValueArray(), transformedQual2.getValueOffset(), transformedQual2.getValueLength())); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java index 024fb04c6873..afbaceebeea1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/compress/HFileTestBase.java @@ -27,7 +27,7 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -88,7 +88,7 @@ public void doTest(Configuration conf, Path path, Compression.Algorithm compress scanner = reader.getScanner(conf, false, false); assertTrue("Initial seekTo failed", scanner.seekTo()); do { - Cell kv = scanner.getCell(); + ExtendedCell kv = scanner.getCell(); assertTrue("Read back an unexpected or invalid KV", testKvs.contains(KeyValueUtil.ensureKeyValue(kv))); i++; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java index 68ce5e359f56..24d69d88c101 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java +++ 
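checkAclTag asserts that the coprocessor carried the mutation's ACL onto the result cell as an ACL-typed tag with matching bytes. The check in isolation, with a stand-in Tag type and an illustrative tag-type constant:

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    final class AclTagCheckSketch {
      static final byte ACL_TAG_TYPE = (byte) 0xfd; // illustrative, not the real constant

      interface Tag { byte getType(); byte[] getValue(); }

      static boolean checkAclTag(byte[] acl, List<Tag> cellTags) {
        Iterator<Tag> iter = cellTags.iterator();
        while (iter.hasNext()) {
          Tag tag = iter.next();
          if (tag.getType() == ACL_TAG_TYPE && Arrays.equals(acl, tag.getValue())) {
            return true; // the mutation's ACL made it onto the new cell
          }
        }
        return false;
      }
    }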
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java @@ -24,11 +24,11 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.codec.Codec.Decoder; import org.apache.hadoop.hbase.codec.Codec.Encoder; import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; @@ -127,11 +127,11 @@ public void testKVCodecWithTagsForDecodedCellsWithNoTags() throws Exception { ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray()); Decoder decoder = codec.getDecoder(is); assertTrue(decoder.advance()); - assertTrue(CellUtil.equals(c1, decoder.current())); + assertTrue(PrivateCellUtil.equals(c1, decoder.current())); assertTrue(decoder.advance()); - assertTrue(CellUtil.equals(c2, decoder.current())); + assertTrue(PrivateCellUtil.equals(c2, decoder.current())); assertTrue(decoder.advance()); - assertTrue(CellUtil.equals(c3, decoder.current())); + assertTrue(PrivateCellUtil.equals(c3, decoder.current())); assertFalse(decoder.advance()); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java index 6ae5a74ebe8d..a37c5a5a5f90 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java @@ -327,7 +327,7 @@ public void testNextOnSample() throws IOException { int i = 0; do { KeyValue expectedKeyValue = sampleKv.get(i); - Cell cell = seeker.getCell(); + ExtendedCell cell = seeker.getCell(); if ( PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, expectedKeyValue, cell) != 0 @@ -360,7 +360,7 @@ public void testFirstKeyInBlockOnSample() throws IOException { DataBlockEncoder encoder = encoding.getEncoder(); ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv, getEncodingContext(conf, Compression.Algorithm.NONE, encoding), this.useOffheapData); - Cell key = encoder.getFirstKeyCellInBlock(new SingleByteBuff(encodedBuffer)); + ExtendedCell key = encoder.getFirstKeyCellInBlock(new SingleByteBuff(encodedBuffer)); KeyValue firstKv = sampleKv.get(0); if (0 != PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, key, firstKv)) { int commonPrefix = PrivateCellUtil.findCommonPrefixInFlatKey(key, firstKv, false, true); @@ -394,20 +394,20 @@ public void testRowIndexWithTagsButNoTagsInCell() throws IOException { private void checkSeekingConsistency(List encodedSeekers, boolean seekBefore, ExtendedCell keyValue) { - Cell expectedKeyValue = null; + ExtendedCell expectedKeyValue = null; ByteBuffer expectedKey = null; ByteBuffer expectedValue = null; for (DataBlockEncoder.EncodedSeeker seeker : encodedSeekers) { seeker.seekToKeyInBlock(keyValue, seekBefore); seeker.rewind(); - Cell actualKeyValue = seeker.getCell(); + ExtendedCell actualKeyValue = seeker.getCell(); ByteBuffer actualKey = null; actualKey = ByteBuffer.wrap(((KeyValue) seeker.getKey()).getKey()); ByteBuffer actualValue = seeker.getValueShallowCopy(); if (expectedKeyValue != null) { - assertTrue(CellUtil.equals(expectedKeyValue, actualKeyValue)); + 
assertTrue(PrivateCellUtil.equals(expectedKeyValue, actualKeyValue)); } else { expectedKeyValue = actualKeyValue; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java index f3711428ce53..663c0d540499 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java @@ -34,7 +34,7 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HBaseTestingUtil; @@ -244,7 +244,7 @@ public void testHFileEncryption() throws Exception { scanner = reader.getScanner(conf, false, false); assertTrue("Initial seekTo failed", scanner.seekTo()); do { - Cell kv = scanner.getCell(); + ExtendedCell kv = scanner.getCell(); assertTrue("Read back an unexpected or invalid KV", testKvs.contains(KeyValueUtil.ensureKeyValue(kv))); i++; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java index 837ee17110ec..ec1ebfd9d633 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java @@ -26,7 +26,7 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ByteBufferKeyValue; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -116,9 +116,9 @@ public void testBasicScanWithLRUCache() throws IOException { String method = this.getName(); this.region = initHRegion(tableName, method, conf, test_util, fam1); try { - List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false); + List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false); - List actual = performScan(row1, fam1); + List actual = performScan(row1, fam1); // Verify result for (int i = 0; i < expected.size(); i++) { assertFalse(actual.get(i) instanceof ByteBufferKeyValue); @@ -154,9 +154,9 @@ public void testBasicScanWithOffheapBucketCache() throws IOException { String method = this.getName(); this.region = initHRegion(tableName, method, conf, test_util, fam1); try { - List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false); + List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false); - List actual = performScan(row1, fam1); + List actual = performScan(row1, fam1); // Verify result for (int i = 0; i < expected.size(); i++) { assertFalse(actual.get(i) instanceof ByteBufferKeyValue); @@ -195,9 +195,9 @@ public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException { String method = this.getName(); this.region = initHRegion(tableName, method, conf, test_util, fam1); try { - List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, true); + List expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, true); - List actual = performScan(row1, fam1); + List actual = 
performScan(row1, fam1); // Verify result for (int i = 0; i < expected.size(); i++) { assertFalse(actual.get(i) instanceof ByteBufferKeyValue); @@ -211,7 +211,7 @@ public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException { actual = new ArrayList<>(); InternalScanner scanner = region.getScanner(scan); - boolean hasNext = scanner.next(actual); + boolean hasNext = scanner.next((List) actual); assertEquals(false, hasNext); // Verify result for (int i = 0; i < expected.size(); i++) { @@ -229,7 +229,7 @@ public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException { } } - private List insertData(byte[] row1, byte[] qf1, byte[] qf2, byte[] fam1, long ts1, + private List insertData(byte[] row1, byte[] qf1, byte[] qf2, byte[] fam1, long ts1, long ts2, long ts3, boolean withVal) throws IOException { // Putting data in Region Put put = null; @@ -276,7 +276,7 @@ private List insertData(byte[] row1, byte[] qf1, byte[] qf2, byte[] fam1, } // Expected - List expected = new ArrayList<>(); + List expected = new ArrayList<>(); expected.add(kv13); expected.add(kv12); expected.add(kv23); @@ -284,12 +284,12 @@ private List insertData(byte[] row1, byte[] qf1, byte[] qf2, byte[] fam1, return expected; } - private List performScan(byte[] row1, byte[] fam1) throws IOException { + private List performScan(byte[] row1, byte[] fam1) throws IOException { Scan scan = new Scan().withStartRow(row1).addFamily(fam1).readVersions(MAX_VERSIONS); - List actual = new ArrayList<>(); + List actual = new ArrayList<>(); InternalScanner scanner = region.getScanner(scan); - boolean hasNext = scanner.next(actual); + boolean hasNext = scanner.next((List) actual); assertEquals(false, hasNext); return actual; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java index 04c38127d51f..f07238e134d2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekBeforeWithInlineBlocks.java @@ -25,13 +25,13 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.fs.HFileSystem; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.regionserver.StoreFileWriter; @@ -178,8 +178,8 @@ private void checkNoSeekBefore(ExtendedCell[] cells, HFileScanner scanner, int i } /** Check a key/value pair after it was read by the reader */ - private void checkCell(Cell expected, Cell actual) { + private void checkCell(ExtendedCell expected, ExtendedCell actual) { assertTrue(String.format("Expected key %s, but was %s", CellUtil.getCellKeyAsString(expected), - CellUtil.getCellKeyAsString(actual)), CellUtil.equals(expected, actual)); + CellUtil.getCellKeyAsString(actual)), PrivateCellUtil.equals(expected, actual)); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java 
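The (List) casts around scanner.next(...) above exist because Java generics are invariant: a List of ExtendedCell is not a List of Cell even though ExtendedCell extends Cell. A minimal demonstration with stand-in types; the raw cast is unchecked and is safe here only because the scanner adds nothing but extended cells:

    import java.util.ArrayList;
    import java.util.List;

    final class InvarianceSketch {
      interface CellX {}
      interface ExtendedCellX extends CellX {}

      // stand-in for InternalScanner.next(List<Cell>)
      static boolean next(List<CellX> results) { return false; }

      @SuppressWarnings({ "unchecked", "rawtypes" })
      public static void main(String[] args) {
        List<ExtendedCellX> actual = new ArrayList<>();
        // next(actual);              // does not compile: List<ExtendedCellX> != List<CellX>
        boolean hasNext = next((List) actual); // raw cast; relies on what next() adds
        System.out.println(hasNext);
      }
    }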
index bf26b019e2a1..4c55c85f9035 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -171,7 +172,7 @@ protected void testSeekBeforeInternals(TagUsage tagUsage) throws IOException { assertEquals("g", toRowStr(scanner.getCell())); assertTrue(scanner.seekBefore(toKV("j", tagUsage))); assertEquals("i", toRowStr(scanner.getCell())); - Cell cell = scanner.getCell(); + ExtendedCell cell = scanner.getCell(); if (tagUsage != TagUsage.NO_TAG && cell.getTagsLength() > 0) { Iterator tagsIterator = PrivateCellUtil.tagsIterator(cell); while (tagsIterator.hasNext()) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java index 0f0c22baf9fc..248e8f9d8cef 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java @@ -62,14 +62,14 @@ import java.util.Collections; import java.util.List; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseServerBase; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MatcherPredicate; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.Waiter; @@ -176,7 +176,7 @@ public void testNoCodec() throws IOException, ServiceException { public void testCompressCellBlock() throws IOException, ServiceException { Configuration clientConf = new Configuration(CONF); clientConf.set("hbase.client.rpc.compressor", GzipCodec.class.getCanonicalName()); - List cells = new ArrayList<>(); + List cells = new ArrayList<>(); int count = 3; for (int i = 0; i < count; i++) { cells.add(CELL); @@ -188,7 +188,8 @@ public void testCompressCellBlock() throws IOException, ServiceException { try (AbstractRpcClient client = createRpcClient(clientConf)) { rpcServer.start(); BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress()); - HBaseRpcController pcrc = new HBaseRpcControllerImpl(CellUtil.createCellScanner(cells)); + HBaseRpcController pcrc = + new HBaseRpcControllerImpl(PrivateCellUtil.createExtendedCellScanner(cells)); String message = "hello"; assertEquals(message, stub.echo(pcrc, EchoRequestProto.newBuilder().setMessage(message).build()).getMessage()); @@ -270,9 +271,8 @@ public void testRpcMaxRequestSize() throws IOException, ServiceException { } // set total RPC size bigger than 100 bytes EchoRequestProto param = EchoRequestProto.newBuilder().setMessage(message.toString()).build(); - stub.echo( - new HBaseRpcControllerImpl(CellUtil.createCellScanner(ImmutableList. 
of(CELL))), - param); + stub.echo(new HBaseRpcControllerImpl( + PrivateCellUtil.createExtendedCellScanner(ImmutableList. of(CELL))), param); fail("RPC should have failed because it exceeds max request size"); } catch (ServiceException e) { LOG.info("Caught expected exception: " + e); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyChannelWritability.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyChannelWritability.java index 001f6dbd22c7..64fc47ca1940 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyChannelWritability.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyChannelWritability.java @@ -32,13 +32,13 @@ import java.util.concurrent.CompletionException; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompatibilityFactory; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.test.MetricsAssertHelper; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RPCTests; @@ -112,7 +112,7 @@ public void testNettyWritableFatalThreshold() throws Exception { private void sendAndReceive(Configuration conf, NettyRpcServer rpcServer, int requestCount) throws Exception { - List cells = new ArrayList<>(); + List cells = new ArrayList<>(); int count = 3; for (int i = 0; i < count; i++) { cells.add(CELL); @@ -136,9 +136,10 @@ private void sendAndReceive(Configuration conf, NettyRpcServer rpcServer, int re } } - private void sendMessage(List cells, + private void sendMessage(List cells, TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface stub) throws Exception { - HBaseRpcController pcrc = new HBaseRpcControllerImpl(CellUtil.createCellScanner(cells)); + HBaseRpcController pcrc = + new HBaseRpcControllerImpl(PrivateCellUtil.createExtendedCellScanner(cells)); String message = "hello"; assertEquals(message, stub.echo(pcrc, TestProtos.EchoRequestProto.newBuilder().setMessage(message).build()) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtobufRpcServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtobufRpcServiceImpl.java index b2ac0a3deb9b..edb9a64a7a8e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtobufRpcServiceImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtobufRpcServiceImpl.java @@ -21,10 +21,10 @@ import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellScanner; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScanner; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @@ -82,8 +82,8 @@ public EchoResponseProto echo(RpcController controller, EchoRequestProto request HBaseRpcController pcrc = (HBaseRpcController) controller; // 
If cells, scan them to check we are able to iterate what we were given and since this is an // echo, just put them back on the controller creating a new block. Tests our block building. - CellScanner cellScanner = pcrc.cellScanner(); - List list = null; + ExtendedCellScanner cellScanner = pcrc.cellScanner(); + List list = null; if (cellScanner != null) { list = new ArrayList<>(); try { @@ -94,7 +94,7 @@ public EchoResponseProto echo(RpcController controller, EchoRequestProto request throw new ServiceException(e); } } - cellScanner = CellUtil.createCellScanner(list); + cellScanner = PrivateCellUtil.createExtendedCellScanner(list); pcrc.setCellScanner(cellScanner); } return EchoResponseProto.newBuilder().setMessage(request.getMessage()).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index a7164a6fab64..a25bae6ec7bd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -33,10 +33,10 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.Abortable; -import org.apache.hadoop.hbase.CellScannable; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ChoreService; import org.apache.hadoop.hbase.CoordinatedStateManager; +import org.apache.hadoop.hbase.ExtendedCellScannable; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableDescriptors; import org.apache.hadoop.hbase.TableName; @@ -388,9 +388,10 @@ public ScanResponse scan(RpcController controller, ScanRequest request) throws S Result result = next(scannerId); if (result != null) { builder.addCellsPerResult(result.size()); - List results = new ArrayList<>(1); + List results = new ArrayList<>(1); results.add(result); - ((HBaseRpcController) controller).setCellScanner(CellUtil.createCellScanner(results)); + ((HBaseRpcController) controller) + .setCellScanner(PrivateCellUtil.createExtendedCellScanner(results)); builder.setMoreResults(true); } else { builder.setMoreResults(false); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobStoreCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobStoreCompaction.java index c2f2b3fd4260..835d8b83d69e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobStoreCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mob/TestMobStoreCompaction.java @@ -39,6 +39,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -373,11 +374,11 @@ private int countMobRows() throws IOException { InternalScanner scanner = region.getScanner(scan); int scannedCount = 0; - List results = new ArrayList<>(); + List results = new ArrayList<>(); boolean hasMore = true; while (hasMore) { - hasMore = scanner.next(results); - for (Cell c : results) { + hasMore = scanner.next((List) results); + for (ExtendedCell c : results) { if (MobUtils.isMobReferenceCell(c)) { scannedCount++; } @@ -401,15 +402,15 @@ private int countReferencedMobFiles() throws IOException { 
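The echo service drains the request's cell scanner into a list, then wraps the list in a fresh scanner for the response, exercising both the decode path and the block-building path. The same drain-and-rebuild logic with simplified stand-ins:

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    final class EchoCellsSketch {
      interface CellE {}
      interface ScannerE { CellE current(); boolean advance(); }

      static ScannerE scannerOf(final List<CellE> cells) {
        final Iterator<CellE> it = cells.iterator();
        return new ScannerE() {
          private CellE cur;
          @Override public CellE current() { return cur; }
          @Override public boolean advance() {
            if (!it.hasNext()) { cur = null; return false; }
            cur = it.next();
            return true;
          }
        };
      }

      /** Consumes the request scanner and returns an equivalent one for the response. */
      static ScannerE echo(ScannerE in) {
        List<CellE> list = new ArrayList<>();
        while (in.advance()) {
          list.add(in.current()); // proves we can iterate what we were given
        }
        return scannerOf(list); // tests the block-building path on the way back
      }
    }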
scan.setAttribute(MobConstants.MOB_SCAN_RAW, Bytes.toBytes(Boolean.TRUE)); InternalScanner scanner = region.getScanner(scan); - List kvs = new ArrayList<>(); + List kvs = new ArrayList<>(); boolean hasMore = true; String fileName; Set files = new HashSet<>(); do { kvs.clear(); - hasMore = scanner.next(kvs); + hasMore = scanner.next((List) kvs); for (Cell kv : kvs) { - if (!MobUtils.isMobReferenceCell(kv)) { + if (!MobUtils.isMobReferenceCell((ExtendedCell) kv)) { continue; } if (!MobUtils.hasValidMobRefCellValue(kv)) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java index 124214e46af3..05243bd93a6c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java @@ -31,7 +31,7 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -717,7 +717,7 @@ public Message getParam() { } @Override - public CellScanner getCellScanner() { + public ExtendedCellScanner getCellScanner() { return null; } @@ -783,7 +783,7 @@ public int getRemotePort() { } @Override - public void setResponse(Message param, CellScanner cells, Throwable errorThrowable, + public void setResponse(Message param, ExtendedCellScanner cells, Throwable errorThrowable, String error) { } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java index 1de0a0d31a33..4ec3e90aad86 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java @@ -29,7 +29,7 @@ import java.util.Collections; import java.util.Map; import java.util.Optional; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.ipc.RpcCall; import org.apache.hadoop.hbase.ipc.RpcCallback; @@ -118,7 +118,7 @@ public Message getParam() { } @Override - public CellScanner getCellScanner() { + public ExtendedCellScanner getCellScanner() { return null; } @@ -182,7 +182,7 @@ public int getRemotePort() { } @Override - public void setResponse(Message param, CellScanner cells, Throwable errorThrowable, + public void setResponse(Message param, ExtendedCellScanner cells, Throwable errorThrowable, String error) { } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java index fdd5c7d5cf90..b440431f1fb0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java @@ -28,7 +28,7 @@ import java.util.Map; import java.util.Optional; import java.util.Set; -import org.apache.hadoop.hbase.CellScanner; +import 
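The mob-counting helpers above page through the region scanner with a reusable batch list until next() reports no more rows. The loop shape with stand-in types; isMobReference stands in for the MobUtils check:

    import java.util.ArrayList;
    import java.util.List;

    final class ScanLoopSketch {
      interface CellM { boolean isMobReference(); }
      interface ScannerM { boolean next(List<CellM> out); }

      static int countMobCells(ScannerM scanner) {
        List<CellM> results = new ArrayList<>();
        int count = 0;
        boolean hasMore = true;
        while (hasMore) {
          results.clear();               // reuse the batch buffer between next() calls
          hasMore = scanner.next(results);
          for (CellM c : results) {
            if (c.isMobReference()) {
              count++;
            }
          }
        }
        return count;
      }
    }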
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java
index fdd5c7d5cf90..b440431f1fb0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java
@@ -28,7 +28,7 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
-import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.ExtendedCellScanner;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.ipc.RpcCall;
@@ -184,7 +184,7 @@ public Message getParam() {
     }

     @Override
-    public CellScanner getCellScanner() {
+    public ExtendedCellScanner getCellScanner() {
       return null;
     }

@@ -244,7 +244,7 @@ public int getRemotePort() {
     }

     @Override
-    public void setResponse(Message param, CellScanner cells, Throwable errorThrowable,
+    public void setResponse(Message param, ExtendedCellScanner cells, Throwable errorThrowable,
       String error) {
     }

diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
index 5dd0ce8dafb7..bfbeed768554 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java
@@ -23,8 +23,8 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -46,19 +46,19 @@ public class TestReplicationProtobuf {
    */
   @Test
   public void testGetCellScanner() throws IOException {
-    List<Cell> a = new ArrayList<>();
+    List<ExtendedCell> a = new ArrayList<>();
     KeyValue akv = new KeyValue(Bytes.toBytes("a"), -1L);
     a.add(akv);
     // Add a few just to make it less regular.
     a.add(new KeyValue(Bytes.toBytes("aa"), -1L));
     a.add(new KeyValue(Bytes.toBytes("aaa"), -1L));
-    List<Cell> b = new ArrayList<>();
+    List<ExtendedCell> b = new ArrayList<>();
     KeyValue bkv = new KeyValue(Bytes.toBytes("b"), -1L);
     a.add(bkv);
-    List<Cell> c = new ArrayList<>();
+    List<ExtendedCell> c = new ArrayList<>();
     KeyValue ckv = new KeyValue(Bytes.toBytes("c"), -1L);
     c.add(ckv);
-    List<List<? extends Cell>> all = new ArrayList<>();
+    List<List<ExtendedCell>> all = new ArrayList<>();
     all.add(a);
     all.add(b);
     all.add(c);
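KeyValue still implements ExtendedCell, so fixtures like the ones in TestReplicationProtobuf above can be collected as List<ExtendedCell> and walked through an ExtendedCellScanner. A small sketch built only from calls visible in the patch (the class name is illustrative):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.ExtendedCellScanner;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class FixtureScannerSketch {
  public static void main(String[] args) throws IOException {
    List<ExtendedCell> cells = new ArrayList<>();
    cells.add(new KeyValue(Bytes.toBytes("a"), -1L)); // KeyValue is an ExtendedCell
    cells.add(new KeyValue(Bytes.toBytes("b"), -1L));
    ExtendedCellScanner scanner = PrivateCellUtil.createExtendedCellScanner(cells);
    while (scanner.advance()) {
      ExtendedCell current = scanner.current(); // current() is typed on ExtendedCell now
      System.out.println(current);
    }
  }
}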
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
index ec9de92e9f25..edabfc51f2bb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
@@ -30,7 +30,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -251,7 +251,7 @@ public boolean verifyCodecs(final KeyValueScanner scanner, final int kvLimit) th
     KeyValue currentKv;

     scanner.seek(KeyValue.LOWESTKEY);
-    List<Iterator<Cell>> codecIterators = new ArrayList<>();
+    List<Iterator<ExtendedCell>> codecIterators = new ArrayList<>();
     for (EncodedDataBlock codec : codecs) {
       codecIterators.add(codec.getIterator(HFileBlock.headerSize(useHBaseChecksum)));
     }
@@ -260,8 +260,8 @@ public boolean verifyCodecs(final KeyValueScanner scanner, final int kvLimit) th
     while ((currentKv = KeyValueUtil.ensureKeyValue(scanner.next())) != null && j < kvLimit) {
       // Iterates through key/value pairs
       ++j;
-      for (Iterator<Cell> it : codecIterators) {
-        Cell c = it.next();
+      for (Iterator<ExtendedCell> it : codecIterators) {
+        ExtendedCell c = it.next();
         KeyValue codecKv = KeyValueUtil.ensureKeyValue(c);
         if (
           codecKv == null
@@ -337,7 +337,7 @@ private int benchmarkEncoder(int previousTotalSize, EncodedDataBlock codec) {
     for (int itTime = 0; itTime < benchmarkNTimes; ++itTime) {
       totalSize = 0;

-      Iterator<Cell> it;
+      Iterator<ExtendedCell> it;

       it = codec.getIterator(HFileBlock.headerSize(useHBaseChecksum));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
index 601370357744..f33faf8c8e02 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
@@ -23,7 +23,6 @@
 import java.util.Random;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.HConstants;
@@ -97,7 +96,7 @@ private void runTest(Path path, DataBlockEncoding blockEncoding,
-    List<Cell> results = new ArrayList<>();
-    while (s.next(results))
+    List<ExtendedCell> results = new ArrayList<>();
+    while (s.next((List) results))
       ;
     s.close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java
index e2f9ac2f34ac..52826e2e8e23 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java
@@ -31,11 +31,11 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
 import org.apache.hadoop.hbase.regionserver.ChunkCreator.ChunkType;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -187,10 +187,10 @@ private void testSubSet(CellSet cs) throws Exception {
       Iterator<ExtendedCell> excludeIter = excludeTail.iterator();
       Iterator<ExtendedCell> includeIter = includeTail.iterator();
       for (int j = 1 + i; j != ascCells.length; ++j) {
-        assertEquals(true, CellUtil.equals(excludeIter.next(), ascCells[j]));
+        assertEquals(true, PrivateCellUtil.equals(excludeIter.next(), ascCells[j]));
       }
       for (int j = i; j != ascCells.length; ++j) {
-        assertEquals(true, CellUtil.equals(includeIter.next(), ascCells[j]));
+        assertEquals(true, PrivateCellUtil.equals(includeIter.next(), ascCells[j]));
       }
     }
     assertEquals(NUM_OF_CELLS, cs.tailSet(lowerOuterCell, false).size());
@@ -203,10 +203,10 @@ private void testSubSet(CellSet cs) throws Exception {
       Iterator<ExtendedCell> excludeIter = excludeHead.iterator();
       Iterator<ExtendedCell> includeIter = includeHead.iterator();
       for (int j = 0; j != i; ++j) {
-        assertEquals(true, CellUtil.equals(excludeIter.next(), ascCells[j]));
+        assertEquals(true, PrivateCellUtil.equals(excludeIter.next(), ascCells[j]));
       }
       for (int j = 0; j != i + 1; ++j) {
-        assertEquals(true, CellUtil.equals(includeIter.next(), ascCells[j]));
+        assertEquals(true, PrivateCellUtil.equals(includeIter.next(), ascCells[j]));
       }
     }
     assertEquals(0, cs.headSet(lowerOuterCell, false).size());
@@ -217,7 +217,7 @@ private void testSubSet(CellSet cs) throws Exception {
     assertEquals(NUM_OF_CELLS, sub.size());
     Iterator<ExtendedCell> iter = sub.values().iterator();
     for (int i = 0; i != ascCells.length; ++i) {
-      assertEquals(true, CellUtil.equals(iter.next(), ascCells[i]));
+      assertEquals(true, PrivateCellUtil.equals(iter.next(), ascCells[i]));
     }
   }
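TestCellFlatSet's assertions switch from CellUtil.equals to PrivateCellUtil.equals, which this change treats as the internal-side comparison for ExtendedCell. A hedged sketch of the comparison idiom, assuming the two-argument overload used above (sameCells is an illustrative helper):

import java.util.Iterator;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.PrivateCellUtil;

public class CellEqualsSketch {
  // Compare an iterator's output against an expected sequence, cell by cell.
  static boolean sameCells(Iterator<ExtendedCell> actual, ExtendedCell[] expected) {
    for (ExtendedCell e : expected) {
      if (!actual.hasNext() || !PrivateCellUtil.equals(actual.next(), e)) {
        return false;
      }
    }
    return !actual.hasNext(); // no extra cells beyond the expected set
  }
}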
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
index 53e183f82590..c9e6cd83ec6f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
@@ -311,15 +311,15 @@ public void testGetReferencesFromFiles() throws IOException {
     InternalScanner scanner = (InternalScanner) store.getScanner(scan,
       scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()), 0);
-    List<Cell> results = new ArrayList<>();
-    scanner.next(results);
+    List<ExtendedCell> results = new ArrayList<>();
+    scanner.next((List) results);
     Collections.sort(results, CellComparatorImpl.COMPARATOR);
     scanner.close();

     // Compare
     Assert.assertEquals(expected.size(), results.size());
     for (int i = 0; i < results.size(); i++) {
-      Cell cell = results.get(i);
+      ExtendedCell cell = results.get(i);
       Assert.assertTrue(MobUtils.isMobReferenceCell(cell));
     }
   }
@@ -399,15 +399,15 @@ public void testMobCellSizeThreshold() throws IOException {
     InternalScanner scanner = (InternalScanner) store.getScanner(scan,
       scan.getFamilyMap().get(store.getColumnFamilyDescriptor().getName()), 0);
-    List<Cell> results = new ArrayList<>();
-    scanner.next(results);
+    List<ExtendedCell> results = new ArrayList<>();
+    scanner.next((List) results);
     Collections.sort(results, CellComparatorImpl.COMPARATOR);
     scanner.close();

     // Compare
     Assert.assertEquals(expected.size(), results.size());
     for (int i = 0; i < results.size(); i++) {
-      Cell cell = results.get(i);
+      ExtendedCell cell = results.get(i);
       // this is not mob reference cell.
       Assert.assertFalse(MobUtils.isMobReferenceCell(cell));
       Assert.assertEquals(expected.get(i), results.get(i));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index a271920c0150..d9856b40a831 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -82,6 +82,7 @@
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.DroppedSnapshotException;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -3780,26 +3781,26 @@ public void testRegionScanner_Next() throws IOException {
     scan.addFamily(fam2);
     scan.addFamily(fam4);
     try (InternalScanner is = region.getScanner(scan)) {
-      List<Cell> res = null;
+      List<ExtendedCell> res = null;

       // Result 1
-      List<Cell> expected1 = new ArrayList<>();
+      List<ExtendedCell> expected1 = new ArrayList<>();
       expected1.add(new KeyValue(row1, fam2, null, ts, KeyValue.Type.Put, null));
       expected1.add(new KeyValue(row1, fam4, null, ts, KeyValue.Type.Put, null));

       res = new ArrayList<>();
-      is.next(res);
+      is.next((List) res);
       for (int i = 0; i < res.size(); i++) {
         assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected1.get(i), res.get(i)));
       }

       // Result 2
-      List<Cell> expected2 = new ArrayList<>();
+      List<ExtendedCell> expected2 = new ArrayList<>();
       expected2.add(new KeyValue(row2, fam2, null, ts, KeyValue.Type.Put, null));
       expected2.add(new KeyValue(row2, fam4, null, ts, KeyValue.Type.Put, null));

       res = new ArrayList<>();
-      is.next(res);
+      is.next((List) res);
       for (int i = 0; i < res.size(); i++) {
         assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected2.get(i), res.get(i)));
       }
@@ -3894,7 +3895,7 @@ public void testScanner_ExplicitColumns_FromFilesOnly_EnforceVersions() throws I
     region.flush(true);

     // Expected
-    List<Cell> expected = new ArrayList<>();
+    List<ExtendedCell> expected = new ArrayList<>();
     expected.add(kv13);
     expected.add(kv12);
     expected.add(kv23);
@@ -3904,9 +3905,9 @@ public void testScanner_ExplicitColumns_FromFilesOnly_EnforceVersions() throws I
     scan.addColumn(fam1, qf1);
     scan.addColumn(fam1, qf2);
     scan.readVersions(MAX_VERSIONS);
-    List<Cell> actual = new ArrayList<>();
+    List<ExtendedCell> actual = new ArrayList<>();
     try (InternalScanner scanner = region.getScanner(scan)) {
-      boolean hasNext = scanner.next(actual);
+      boolean hasNext = scanner.next((List) actual);
       assertEquals(false, hasNext);

       // Verify result
@@ -3968,7 +3969,7 @@ public void testScanner_ExplicitColumns_FromMemStoreAndFiles_EnforceVersions()
     region.put(put);

     // Expected
-    List<Cell> expected = new ArrayList<>();
+    List<ExtendedCell> expected = new ArrayList<>();
     expected.add(kv14);
     expected.add(kv13);
     expected.add(kv12);
@@ -3981,9 +3982,9 @@ public void testScanner_ExplicitColumns_FromMemStoreAndFiles_EnforceVersions()
     scan.addColumn(fam1, qf2);
     int versions = 3;
     scan.readVersions(versions);
-    List<Cell> actual = new ArrayList<>();
+    List<ExtendedCell> actual = new ArrayList<>();
     try (InternalScanner scanner = region.getScanner(scan)) {
-      boolean hasNext = scanner.next(actual);
+      boolean hasNext = scanner.next((List) actual);
       assertEquals(false, hasNext);

       // Verify result
@@ -4082,7 +4083,7 @@ public void testScanner_Wildcard_FromFilesOnly_EnforceVersions() throws IOExcept
     region.flush(true);

     // Expected
-    List<Cell> expected = new ArrayList<>();
+    List<ExtendedCell> expected = new ArrayList<>();
     expected.add(kv13);
     expected.add(kv12);
     expected.add(kv23);
@@ -4091,9 +4092,9 @@ public void testScanner_Wildcard_FromFilesOnly_EnforceVersions() throws IOExcept
     Scan scan = new Scan().withStartRow(row1);
     scan.addFamily(fam1);
     scan.readVersions(MAX_VERSIONS);
-    List<Cell> actual = new ArrayList<>();
+    List<ExtendedCell> actual = new ArrayList<>();
     try (InternalScanner scanner = region.getScanner(scan)) {
-      boolean hasNext = scanner.next(actual);
+      boolean hasNext = scanner.next((List) actual);
       assertEquals(false, hasNext);

       // Verify result
@@ -4207,9 +4208,9 @@ public void testScanner_Wildcard_FromMemStoreAndFiles_EnforceVersions() throws I
     Scan scan = new Scan().withStartRow(row1);
     int versions = 3;
     scan.readVersions(versions);
-    List<Cell> actual = new ArrayList<>();
+    List<ExtendedCell> actual = new ArrayList<>();
     try (InternalScanner scanner = region.getScanner(scan)) {
-      boolean hasNext = scanner.next(actual);
+      boolean hasNext = scanner.next((List) actual);
       assertEquals(false, hasNext);

       // Verify result
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
index 7b8fcf4e334c..6a410f953fe7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
@@ -35,6 +35,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
@@ -397,8 +398,8 @@ static class FindBulkHBaseListener extends TestWALActionsListener.DummyWALAction
     @Override
     public void visitLogEntryBeforeWrite(RegionInfo info, WALKey logKey, WALEdit logEdit) {
       for (Cell cell : logEdit.getCells()) {
-        KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-        for (Map.Entry<String, Object> entry : kv.toStringMap().entrySet()) {
+        KeyValue kv = KeyValueUtil.ensureKeyValue((ExtendedCell) cell);
+        for (Map.Entry<String, Object> entry : kv.toStringMap().entrySet()) {
           if (entry.getValue().equals(Bytes.toString(WALEdit.BULK_LOAD))) {
             found = true;
           }
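Several hunks, most recently TestHRegionServerBulkLoad above, show that KeyValueUtil.ensureKeyValue is now declared against ExtendedCell, so call sites holding a plain Cell cast first. A minimal sketch of that idiom (dump is an illustrative helper; the cast mirrors the hunks, on the assumption that server-side cells are always ExtendedCell):

import java.util.Map;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;

public class EnsureKeyValueSketch {
  static void dump(Iterable<Cell> cells) {
    for (Cell cell : cells) {
      // ensureKeyValue now takes ExtendedCell; cast as the test hunks do.
      KeyValue kv = KeyValueUtil.ensureKeyValue((ExtendedCell) cell);
      for (Map.Entry<String, Object> entry : kv.toStringMap().entrySet()) {
        System.out.println(entry.getKey() + "=" + entry.getValue());
      }
    }
  }
}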
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
index a3fa1bb65db8..13f2101c0040 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java
@@ -2113,7 +2113,7 @@ public void testForceCloneOfBigCellForCellChunkImmutableSegment() throws Excepti
       (StoreScanner) store.getScanner(new Scan(new Get(rowKey1)), quals, seqId + 1);
     SegmentScanner segmentScanner = getTypeKeyValueScanner(storeScanner, SegmentScanner.class);
     ExtendedCell resultCell1 = segmentScanner.next();
-    assertTrue(CellUtil.equals(resultCell1, originalCell1));
+    assertTrue(PrivateCellUtil.equals(resultCell1, originalCell1));
     int cell1ChunkId = resultCell1.getChunkId();
     assertTrue(cell1ChunkId != ExtendedCell.CELL_NOT_BASED_ON_CHUNK);
     assertNull(segmentScanner.next());
@@ -2140,12 +2140,12 @@ public void testForceCloneOfBigCellForCellChunkImmutableSegment() throws Excepti
     // {@link CellChunkMap#getCell} we could not get the data chunk by chunkId.
     storeScanner = (StoreScanner) store.getScanner(new Scan(new Get(rowKey1)), quals, seqId + 1);
     segmentScanner = getTypeKeyValueScanner(storeScanner, SegmentScanner.class);
-    Cell newResultCell1 = segmentScanner.next();
+    ExtendedCell newResultCell1 = segmentScanner.next();
     assertTrue(newResultCell1 != resultCell1);
-    assertTrue(CellUtil.equals(newResultCell1, originalCell1));
+    assertTrue(PrivateCellUtil.equals(newResultCell1, originalCell1));

-    Cell resultCell2 = segmentScanner.next();
-    assertTrue(CellUtil.equals(resultCell2, originalCell2));
+    ExtendedCell resultCell2 = segmentScanner.next();
+    assertTrue(PrivateCellUtil.equals(resultCell2, originalCell2));
     assertNull(segmentScanner.next());
     segmentScanner.close();
     storeScanner.close();
@@ -2569,17 +2569,17 @@ public void testClearSnapshotGetScannerConcurrently() throws Exception {
       assertTrue(!memStoreLAB.chunks.isEmpty());
      assertTrue(!memStoreLAB.isReclaimed());

-      Cell cell1 = segmentScanner.next();
-      CellUtil.equals(smallCell, cell1);
-      Cell cell2 = segmentScanner.next();
-      CellUtil.equals(largeCell, cell2);
+      ExtendedCell cell1 = segmentScanner.next();
+      PrivateCellUtil.equals(smallCell, cell1);
+      ExtendedCell cell2 = segmentScanner.next();
+      PrivateCellUtil.equals(largeCell, cell2);
       assertNull(segmentScanner.next());
     } else {
-      List<Cell> results = new ArrayList<>();
-      storeScanner.next(results);
+      List<ExtendedCell> results = new ArrayList<>();
+      storeScanner.next((List) results);
       assertEquals(2, results.size());
-      CellUtil.equals(smallCell, results.get(0));
-      CellUtil.equals(largeCell, results.get(1));
+      PrivateCellUtil.equals(smallCell, results.get(0));
+      PrivateCellUtil.equals(largeCell, results.get(1));
     }
     assertTrue(exceptionRef.get() == null);
   } finally {
@@ -2712,11 +2712,11 @@ public void testMemoryLeakWhenFlushMemStoreRetrying() throws Exception {
     assertTrue(storeScanner.currentScanners.size() == 1);
     assertTrue(storeScanner.currentScanners.get(0) instanceof StoreFileScanner);

-    List<Cell> results = new ArrayList<>();
-    storeScanner.next(results);
+    List<ExtendedCell> results = new ArrayList<>();
+    storeScanner.next((List) results);
     assertEquals(2, results.size());
-    CellUtil.equals(smallCell, results.get(0));
-    CellUtil.equals(largeCell, results.get(1));
+    PrivateCellUtil.equals(smallCell, results.get(0));
+    PrivateCellUtil.equals(largeCell, results.get(1));
   } finally {
     if (storeScanner != null) {
       storeScanner.close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index 824c195fd0f1..3db0b3d3f64c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -45,6 +45,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.HConstants;
@@ -1058,8 +1059,8 @@ public void testCacheOnWriteEvictOnClose() throws Exception {
     readerTwo.loadFileInfo();
     StoreFileScanner scannerTwo = getStoreFileScanner(readerTwo, true, true);
     scannerTwo.seek(KeyValue.LOWESTKEY);
-    Cell kv1 = null;
-    Cell kv2 = null;
+    ExtendedCell kv1 = null;
+    ExtendedCell kv2 = null;
     while ((kv1 = scannerOne.next()) != null) {
       kv2 = scannerTwo.next();
       assertTrue(kv1.equals(kv2));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index f9c332564d94..fdaa19dc3e5a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -29,6 +29,7 @@
 import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.HConstants;
@@ -349,8 +350,8 @@ public void testRawScan() throws Exception {
     s.setRaw(true);
     s.readAllVersions();
     InternalScanner scan = region.getScanner(s);
-    List<Cell> kvs = new ArrayList<>();
-    scan.next(kvs);
+    List<ExtendedCell> kvs = new ArrayList<>();
+    scan.next((List) kvs);
     assertEquals(8, kvs.size());
     assertTrue(PrivateCellUtil.isDeleteFamily(kvs.get(0)));
     assertArrayEquals(CellUtil.cloneValue(kvs.get(1)), T3);
@@ -369,7 +370,7 @@ public void testRawScan() throws Exception {
     s.setTimeRange(0, 1);
     scan = region.getScanner(s);
     kvs = new ArrayList<>();
-    scan.next(kvs);
+    scan.next((List) kvs);
     // nothing in this interval, not even delete markers
     assertTrue(kvs.isEmpty());
@@ -380,7 +381,7 @@ public void testRawScan() throws Exception {
     s.setTimeRange(0, ts + 2);
     scan = region.getScanner(s);
     kvs = new ArrayList<>();
-    scan.next(kvs);
+    scan.next((List) kvs);
     assertEquals(4, kvs.size());
     assertTrue(PrivateCellUtil.isDeleteFamily(kvs.get(0)));
     assertArrayEquals(CellUtil.cloneValue(kvs.get(1)), T1);
@@ -395,7 +396,7 @@ public void testRawScan() throws Exception {
     s.setTimeRange(ts + 3, ts + 5);
     scan = region.getScanner(s);
     kvs = new ArrayList<>();
-    scan.next(kvs);
+    scan.next((List) kvs);
     assertEquals(2, kvs.size());
     assertArrayEquals(CellUtil.cloneValue(kvs.get(0)), T3);
     assertTrue(CellUtil.isDelete(kvs.get(1)));
" + queryInfo + deleteInfo + ";", kvPos < kvs.size()); assertTrue("Scanner returned wrong key/value; " + queryInfo + deleteInfo + ";", - PrivateCellUtil.equalsIgnoreMvccVersion(kvs.get(kvPos), (kv))); + PrivateCellUtil.equalsIgnoreMvccVersion(kvs.get(kvPos), kv)); ++kvPos; ++numResults; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReplicateToReplica.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReplicateToReplica.java index a3b4c9e0347f..253ca876bd34 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReplicateToReplica.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReplicateToReplica.java @@ -40,7 +40,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -251,7 +251,7 @@ private FlushResult flushPrimary() throws IOException { private void replicate(Pair, CompletableFuture> pair) throws IOException { Pair params = ReplicationProtobufUtil.buildReplicateWALEntryRequest( + ExtendedCellScanner> params = ReplicationProtobufUtil.buildReplicateWALEntryRequest( pair.getFirst().toArray(new WAL.Entry[0]), secondary.getRegionInfo().getEncodedNameAsBytes(), null, null, null); for (WALEntry entry : params.getFirst().getEntryList()) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java index 67671fe12fef..e3ba4d85a3a9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java @@ -26,7 +26,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; -import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.KeyValue; @@ -93,7 +93,7 @@ public void testReseek() throws Exception { // Now do reseek with empty KV to position to the beginning of the file KeyValue k = KeyValueUtil.createFirstOnRow(Bytes.toBytes("k2")); s.reseek(k); - Cell kv = s.next(); + ExtendedCell kv = s.next(); kv = s.next(); kv = s.next(); byte[] key5 = Bytes.toBytes("k5"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java index 45b927c07afa..b04a0054276c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java @@ -29,6 +29,8 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -489,7 +491,7 @@ 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
index 45b927c07afa..b04a0054276c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
@@ -29,6 +29,8 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
+import org.apache.hadoop.hbase.ExtendedCellScanner;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.HConstants;
@@ -489,7 +491,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
       TestCoprocessorForTags.checkTagPresence = true;
       ResultScanner scanner = table.getScanner(new Scan());
       Result result = scanner.next();
-      KeyValue kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
+      KeyValue kv = KeyValueUtil.ensureKeyValue((ExtendedCell) result.getColumnLatestCell(f, q));
       List<Tag> tags = TestCoprocessorForTags.tags;
       assertEquals(3L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
       assertEquals(1, tags.size());
@@ -504,7 +506,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
       TestCoprocessorForTags.checkTagPresence = true;
       scanner = table.getScanner(new Scan());
       result = scanner.next();
-      kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
+      kv = KeyValueUtil.ensureKeyValue((ExtendedCell) result.getColumnLatestCell(f, q));
       tags = TestCoprocessorForTags.tags;
       assertEquals(5L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
       assertEquals(2, tags.size());
@@ -529,7 +531,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
       TestCoprocessorForTags.checkTagPresence = true;
       scanner = table.getScanner(new Scan().withStartRow(row2));
       result = scanner.next();
-      kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
+      kv = KeyValueUtil.ensureKeyValue((ExtendedCell) result.getColumnLatestCell(f, q));
       tags = TestCoprocessorForTags.tags;
       assertEquals(4L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
       assertEquals(1, tags.size());
@@ -549,7 +551,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
       TestCoprocessorForTags.checkTagPresence = true;
       scanner = table.getScanner(new Scan().withStartRow(row3));
       result = scanner.next();
-      kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
+      kv = KeyValueUtil.ensureKeyValue((ExtendedCell) result.getColumnLatestCell(f, q));
       tags = TestCoprocessorForTags.tags;
       assertEquals(1, tags.size());
       assertEquals("tag1", Bytes.toString(Tag.cloneValue(tags.get(0))));
@@ -563,7 +565,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
       TestCoprocessorForTags.checkTagPresence = true;
       scanner = table.getScanner(new Scan().withStartRow(row3));
       result = scanner.next();
-      kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
+      kv = KeyValueUtil.ensureKeyValue((ExtendedCell) result.getColumnLatestCell(f, q));
       tags = TestCoprocessorForTags.tags;
       assertEquals(2, tags.size());
       // We cannot assume the ordering of tags
@@ -587,7 +589,7 @@ public void testTagsWithAppendAndIncrement() throws Exception {
       TestCoprocessorForTags.checkTagPresence = true;
       scanner = table.getScanner(new Scan().withStartRow(row4));
       result = scanner.next();
-      kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
+      kv = KeyValueUtil.ensureKeyValue((ExtendedCell) result.getColumnLatestCell(f, q));
       tags = TestCoprocessorForTags.tags;
       assertEquals(1, tags.size());
       assertEquals("tag2", Bytes.toString(Tag.cloneValue(tags.get(0))));
@@ -653,7 +655,7 @@ private void updateMutationAddingTags(final Mutation m) {
       if (attribute != null) {
         for (List<Cell> edits : m.getFamilyCellMap().values()) {
           for (Cell cell : edits) {
-            KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
+            KeyValue kv = KeyValueUtil.ensureKeyValue((ExtendedCell) cell);
             if (cf == null) {
               cf = CellUtil.cloneFamily(kv);
             }
@@ -696,9 +698,9 @@ public boolean postScannerNext(ObserverContext<RegionCoprocessorEnvironment> e,
         if (results.size() > 0) {
           // Check tag presence in the 1st cell in 1st Result
           Result result = results.get(0);
-          CellScanner cellScanner = result.cellScanner();
+          ExtendedCellScanner cellScanner = result.cellScanner();
           if (cellScanner.advance()) {
-            Cell cell = cellScanner.current();
+            ExtendedCell cell = cellScanner.current();
             tags = PrivateCellUtil.getTags(cell);
           }
         }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
index 5359dec2e64d..028de5066c80 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java
@@ -203,7 +203,9 @@ public Scanner(KeyValue... kvs) {
     @Override
     public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
-      if (kvs.isEmpty()) return false;
+      if (kvs.isEmpty()) {
+        return false;
+      }
       result.add(kvs.remove(0));
       return !kvs.isEmpty();
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
index 484206ad8387..33efd51368bb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
@@ -29,6 +29,7 @@
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
@@ -219,7 +220,7 @@ public void prePut(final ObserverContext<RegionCoprocessorEnvironment> e, final
       if (attribute != null) {
         for (List<Cell> edits : put.getFamilyCellMap().values()) {
           for (Cell cell : edits) {
-            KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
+            KeyValue kv = KeyValueUtil.ensureKeyValue((ExtendedCell) cell);
             if (cf == null) {
               cf = CellUtil.cloneFamily(kv);
             }
@@ -257,7 +258,7 @@ public void postGetOp(ObserverContext<RegionCoprocessorEnvironment> e, Get get,
       // Check tag presence in the 1st cell in 1st Result
       if (!results.isEmpty()) {
         Cell cell = results.get(0);
-        TAGS = PrivateCellUtil.getTags(cell);
+        TAGS = PrivateCellUtil.getTags((ExtendedCell) cell);
       }
     }
   }
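The coprocessor hooks in TestTags and TestReplicationWithTags still receive plain Cell, so reading tags now goes through an ExtendedCell cast into PrivateCellUtil.getTags. A short sketch of that idiom (tagsOf is an illustrative name):

import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;

public class TagReadSketch {
  static List<Tag> tagsOf(Cell cell) {
    // The cast mirrors the hunks above, which rely on server-side cells
    // always being ExtendedCell under the hood.
    return PrivateCellUtil.getTags((ExtendedCell) cell);
  }
}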
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
index 0b9a9d663e5a..f3fd0878f9a4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
@@ -38,6 +38,7 @@
 import org.apache.hadoop.hbase.CellBuilder;
 import org.apache.hadoop.hbase.CellBuilderFactory;
 import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.Tag;
@@ -273,7 +274,9 @@ public boolean evaluate(Cell cell) throws IOException {
     final List<String> authLabelsFinal = authLabels;
     return new VisibilityExpEvaluator() {
       @Override
-      public boolean evaluate(Cell cell) throws IOException {
+      public boolean evaluate(Cell c) throws IOException {
+        assert c instanceof ExtendedCell;
+        ExtendedCell cell = (ExtendedCell) c;
         boolean visibilityTagPresent = false;
         // Save an object allocation where we can
         if (cell.getTagsLength() > 0) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index dc313d414ae8..10ca33e38725 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
@@ -292,7 +293,7 @@ protected static void doAssert(byte[] row, String visTag) throws Exception {
         (Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), row, 0,
           row.length))
       ) {
-        List<Tag> tags = PrivateCellUtil.getTags(cell);
+        List<Tag> tags = PrivateCellUtil.getTags((ExtendedCell) cell);
         for (Tag tag : tags) {
           if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) {
             assertEquals(visTag, Tag.getValueAsString(tag));
@@ -418,14 +419,15 @@ public void prePut(ObserverContext<RegionCoprocessorEnvironment> e, Put m, WALEd
       if (attribute != null) {
         for (List<Cell> edits : m.getFamilyCellMap().values()) {
           for (Cell cell : edits) {
-            KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
+            KeyValue kv = KeyValueUtil.ensureKeyValue((ExtendedCell) cell);
             if (cf == null) {
               cf = CellUtil.cloneFamily(kv);
             }
             Tag tag = new ArrayBackedTag((byte) NON_VIS_TAG_TYPE, attribute);
-            List<Tag> tagList = new ArrayList<>(PrivateCellUtil.getTags(cell).size() + 1);
+            List<Tag> tagList =
+              new ArrayList<>(PrivateCellUtil.getTags((ExtendedCell) cell).size() + 1);
             tagList.add(tag);
-            tagList.addAll(PrivateCellUtil.getTags(cell));
+            tagList.addAll(PrivateCellUtil.getTags((ExtendedCell) cell));
             Cell newcell = PrivateCellUtil.createCell(kv, tagList);
             ((List<Cell>) updatedCells).add(newcell);
           }
@@ -452,7 +454,7 @@ public void postGetOp(ObserverContext<RegionCoprocessorEnvironment> e, Get get,
       // Check tag presence in the 1st cell in 1st Result
       if (!results.isEmpty()) {
         Cell cell = results.get(0);
-        tags = PrivateCellUtil.getTags(cell);
+        tags = PrivateCellUtil.getTags((ExtendedCell) cell);
       }
     }
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
index 20f621fba61a..d017db6eb9bd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
@@ -144,7 +144,8 @@ public static void verifyTags(Table table) throws IOException {
     ResultScanner s = table.getScanner(new Scan());
     for (Result r : s) {
       for (Cell c : r.listCells()) {
-        Optional<Tag> tag = PrivateCellUtil.getTag(c, TagType.MOB_TABLE_NAME_TAG_TYPE);
+        Optional<Tag> tag =
+          PrivateCellUtil.getTag((ExtendedCell) c, TagType.MOB_TABLE_NAME_TAG_TYPE);
         if (!tag.isPresent()) {
           fail(c.toString() + " has null tag");
           continue;
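The ThriftUtilities hunks that follow are the client-facing end of the change: code outside the client package reaches the internal cells of Result and Mutation through PackagePrivateFieldAccessor instead of the public rawCells/getFamilyCellMap accessors. A hedged sketch of that access pattern, using only the two accessors visible in the hunks (countCells is an illustrative helper):

import java.util.List;
import java.util.Map;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;

public class InternalCellAccessSketch {
  static int countCells(Result result, Put put) {
    // Internal view of a Result's cells.
    ExtendedCell[] raw = PackagePrivateFieldAccessor.getExtendedRawCells(result);
    int n = raw.length;
    // Internal view of a Mutation's per-family cell map.
    for (Map.Entry<byte[], List<ExtendedCell>> entry : PackagePrivateFieldAccessor
      .getExtendedFamilyCellMap(put).entrySet()) {
      n += entry.getValue().size();
    }
    return n;
  }
}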
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
index a02f944e12a7..53299dbab997 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.ExtendedCellBuilder;
 import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HConstants;
@@ -55,6 +56,7 @@
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.OnlineLogRecord;
 import org.apache.hadoop.hbase.client.OperationWithAttributes;
+import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.Result;
@@ -220,14 +222,14 @@ public static List<Get> getsFromThrift(List<TGet> in) throws IOException {
    * @return converted result, returns an empty result if the input is null
    */
   public static TResult resultFromHBase(Result in) {
-    Cell[] raw = in.rawCells();
+    ExtendedCell[] raw = PackagePrivateFieldAccessor.getExtendedRawCells(in);
     TResult out = new TResult();
     byte[] row = in.getRow();
     if (row != null) {
       out.setRow(in.getRow());
     }
     List<TColumnValue> columnValues = new ArrayList<>(raw.length);
-    for (Cell kv : raw) {
+    for (ExtendedCell kv : raw) {
       TColumnValue col = new TColumnValue();
       col.setFamily(CellUtil.cloneFamily(kv));
       col.setQualifier(CellUtil.cloneQualifier(kv));
@@ -1309,9 +1311,10 @@ public static TPut putFromHBase(Put in) {
     if (in.getDurability() != Durability.USE_DEFAULT) {
       out.setDurability(durabilityFromHBase(in.getDurability()));
     }
-    for (Map.Entry<byte[], List<Cell>> entry : in.getFamilyCellMap().entrySet()) {
+    for (Map.Entry<byte[], List<ExtendedCell>> entry : PackagePrivateFieldAccessor
+      .getExtendedFamilyCellMap(in).entrySet()) {
       byte[] family = entry.getKey();
-      for (Cell cell : entry.getValue()) {
+      for (ExtendedCell cell : entry.getValue()) {
         TColumnValue columnValue = new TColumnValue();
         columnValue.setFamily(family).setQualifier(CellUtil.cloneQualifier(cell))
           .setType(cell.getType().getCode()).setTimestamp(cell.getTimestamp())
@@ -1372,9 +1375,10 @@ public static TAppend appendFromHBase(Append in) throws IOException {
     if (in.getDurability() != Durability.USE_DEFAULT) {
       out.setDurability(durabilityFromHBase(in.getDurability()));
     }
-    for (Map.Entry<byte[], List<Cell>> entry : in.getFamilyCellMap().entrySet()) {
+    for (Map.Entry<byte[], List<ExtendedCell>> entry : PackagePrivateFieldAccessor
+      .getExtendedFamilyCellMap(in).entrySet()) {
       byte[] family = entry.getKey();
-      for (Cell cell : entry.getValue()) {
+      for (ExtendedCell cell : entry.getValue()) {
         TColumnValue columnValue = new TColumnValue();
         columnValue.setFamily(family).setQualifier(CellUtil.cloneQualifier(cell))
           .setType(cell.getType().getCode()).setTimestamp(cell.getTimestamp())