From 55d661345dbac7423d542a6baa2542512820cf7a Mon Sep 17 00:00:00 2001 From: Duo Zhang Date: Sun, 28 Jul 2024 21:59:42 +0800 Subject: [PATCH] HBASE-28587 Remove deprecated methods in Cell --- .../hadoop/hbase/filter/KeyOnlyFilter.java | 15 ++-- .../hbase/shaded/protobuf/ProtobufUtil.java | 15 ++-- .../hadoop/hbase/client/TestOperation.java | 13 +++- .../shaded/protobuf/TestProtobufUtil.java | 11 ++- .../java/org/apache/hadoop/hbase/Cell.java | 75 ++----------------- .../hadoop/hbase/CellComparatorImpl.java | 14 +++- .../org/apache/hadoop/hbase/CellUtil.java | 31 +++++--- .../org/apache/hadoop/hbase/ExtendedCell.java | 11 +++ .../org/apache/hadoop/hbase/KeyValue.java | 4 +- .../org/apache/hadoop/hbase/KeyValueUtil.java | 59 --------------- .../hadoop/hbase/MetaCellComparator.java | 7 +- .../apache/hadoop/hbase/PrivateCellUtil.java | 33 ++++++++ .../java/org/apache/hadoop/hbase/RawCell.java | 21 ++++++ .../hadoop/hbase/codec/KeyValueCodec.java | 4 +- .../hbase/codec/KeyValueCodecWithTags.java | 4 +- .../hadoop/hbase/io/encoding/NoneEncoder.java | 3 +- .../apache/hadoop/hbase/TestCellBuilder.java | 4 +- .../hbase/TestIndividualBytesFieldCell.java | 4 +- .../org/apache/hadoop/hbase/TestKeyValue.java | 4 +- .../hadoop/hbase/mapreduce/WALPlayer.java | 6 +- .../hbase/regionserver/RSRpcServices.java | 2 +- .../regionserver/ReplicationSinkService.java | 7 +- .../hbase/regionserver/StoreFileReader.java | 7 +- .../hbase/regionserver/wal/WALCellCodec.java | 4 +- .../hbase/replication/BulkLoadCellFilter.java | 4 +- .../NamespaceTableCfWALEntryFilter.java | 6 +- .../ReplicationSinkServiceImpl.java | 4 +- .../replication/ScopeWALEntryFilter.java | 5 +- .../regionserver/ReplicationSink.java | 11 +-- .../security/access/AccessController.java | 2 +- .../apache/hadoop/hbase/wal/WALSplitUtil.java | 3 +- .../hadoop/hbase/io/hfile/TestHFile.java | 2 +- .../regionserver/TestBulkLoadReplication.java | 6 +- .../TestBulkLoadReplicationHFileRefs.java | 6 +- .../hbase/regionserver/TestHRegion.java | 5 +- .../TestMemStoreSegmentsIterator.java | 6 +- .../hadoop/hbase/regionserver/TestTags.java | 8 +- .../regionserver/TestReplicationSink.java | 68 +++++++++-------- .../regionserver/TestWALEntrySinkFilter.java | 18 +++-- .../util/LoadTestDataGeneratorWithTags.java | 7 +- .../hadoop/hbase/wal/TestWALSplitToHFile.java | 15 ++-- 41 files changed, 264 insertions(+), 270 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index 3cbd2771a62d..ef3687482f4e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -26,6 +26,7 @@ import java.util.Optional; import org.apache.hadoop.hbase.ByteBufferExtendedCell; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -68,11 +69,15 @@ public boolean filterRowKey(Cell cell) throws IOException { } @Override - public Cell transformCell(Cell cell) { - return createKeyOnlyCell(cell); + public Cell transformCell(Cell cell) throws IOException { + if (cell instanceof ExtendedCell) { + return createKeyOnlyCell((ExtendedCell) cell); + } + throw new DoNotRetryIOException( + "Customized cell implementation is not supported: " + cell.getClass().getName()); } - private Cell
createKeyOnlyCell(Cell c) { + private Cell createKeyOnlyCell(ExtendedCell c) { if (c instanceof ByteBufferExtendedCell) { return new KeyOnlyByteBufferExtendedCell((ByteBufferExtendedCell) c, lenAsVal); } else { @@ -147,11 +152,11 @@ public int hashCode() { } static class KeyOnlyCell implements ExtendedCell { - private Cell cell; + private ExtendedCell cell; private int keyLen; private boolean lenAsVal; - public KeyOnlyCell(Cell c, boolean lenAsVal) { + public KeyOnlyCell(ExtendedCell c, boolean lenAsVal) { this.cell = c; this.lenAsVal = lenAsVal; this.keyLen = KeyValueUtil.keyLength(c); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index 0dec61563494..84bd5e9c08a1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -1315,10 +1315,11 @@ public static MutationProto toMutation(final MutationType type, final Mutation m } ColumnValue.Builder columnBuilder = ColumnValue.newBuilder(); QualifierValue.Builder valueBuilder = QualifierValue.newBuilder(); - for (Map.Entry<byte[], List<Cell>> family : mutation.getFamilyCellMap().entrySet()) { + for (Map.Entry<byte[], List<ExtendedCell>> family : ClientInternalHelper + .getExtendedFamilyCellMap(mutation).entrySet()) { columnBuilder.clear(); columnBuilder.setFamily(UnsafeByteOperations.unsafeWrap(family.getKey())); - for (Cell cell : family.getValue()) { + for (ExtendedCell cell : family.getValue()) { valueBuilder.clear(); valueBuilder.setQualifier(UnsafeByteOperations.unsafeWrap(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength())); @@ -1420,13 +1421,13 @@ public static ClientProtos.Result toResult(final Result result, boolean encodeTa return toResult(result.getExists(), result.isStale()); } - Cell[] cells = result.rawCells(); + ExtendedCell[] cells = ClientInternalHelper.getExtendedRawCells(result); if (cells == null || cells.length == 0) { return result.isStale() ? EMPTY_RESULT_PB_STALE : EMPTY_RESULT_PB; } ClientProtos.Result.Builder builder = ClientProtos.Result.newBuilder(); - for (Cell c : cells) { + for (ExtendedCell c : cells) { builder.addCell(toCell(c, encodeTags)); } @@ -1980,7 +1981,7 @@ public static void toIOException(ServiceException se) throws IOException { throw new IOException(se); } - public static CellProtos.Cell toCell(final Cell kv, boolean encodeTags) { + public static CellProtos.Cell toCell(final ExtendedCell kv, boolean encodeTags) { // Doing this is going to kill us if we do it for all data passed.
// St.Ack 20121205 CellProtos.Cell.Builder kvbuilder = CellProtos.Cell.newBuilder(); @@ -1991,7 +1992,7 @@ public static CellProtos.Cell toCell(final Cell kv, boolean encodeTags) { ((ByteBufferExtendedCell) kv).getFamilyPosition(), kv.getFamilyLength())); kvbuilder.setQualifier(wrap(((ByteBufferExtendedCell) kv).getQualifierByteBuffer(), ((ByteBufferExtendedCell) kv).getQualifierPosition(), kv.getQualifierLength())); - kvbuilder.setCellType(CellProtos.CellType.valueOf(kv.getTypeByte())); + kvbuilder.setCellType(CellProtos.CellType.forNumber(kv.getTypeByte())); kvbuilder.setTimestamp(kv.getTimestamp()); kvbuilder.setValue(wrap(((ByteBufferExtendedCell) kv).getValueByteBuffer(), ((ByteBufferExtendedCell) kv).getValuePosition(), kv.getValueLength())); @@ -2006,7 +2007,7 @@ public static CellProtos.Cell toCell(final Cell kv, boolean encodeTags) { kv.getFamilyLength())); kvbuilder.setQualifier(UnsafeByteOperations.unsafeWrap(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())); - kvbuilder.setCellType(CellProtos.CellType.valueOf(kv.getTypeByte())); + kvbuilder.setCellType(CellProtos.CellType.forNumber(kv.getTypeByte())); kvbuilder.setTimestamp(kv.getTimestamp()); kvbuilder.setValue(UnsafeByteOperations.unsafeWrap(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength())); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java index 9ac9a6c3ab91..96feaca575f6 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -456,7 +457,8 @@ public void testPutCreationWithByteBuffer() { Assert.assertEquals(1984L, c.get(0).getTimestamp()); Assert.assertArrayEquals(VALUE, CellUtil.cloneValue(c.get(0))); Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimestamp()); - Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); + Assert.assertEquals(0, + CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue((ExtendedCell) c.get(0)))); p = new Put(ROW); p.addColumn(FAMILY, ByteBuffer.wrap(QUALIFIER), 2013L, null); @@ -465,7 +467,8 @@ public void testPutCreationWithByteBuffer() { Assert.assertEquals(2013L, c.get(0).getTimestamp()); Assert.assertArrayEquals(new byte[] {}, CellUtil.cloneValue(c.get(0))); Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimestamp()); - Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); + Assert.assertEquals(0, + CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue((ExtendedCell) c.get(0)))); p = new Put(ByteBuffer.wrap(ROW)); p.addColumn(FAMILY, ByteBuffer.wrap(QUALIFIER), 2001L, null); @@ -475,7 +478,8 @@ public void testPutCreationWithByteBuffer() { Assert.assertArrayEquals(new byte[] {}, CellUtil.cloneValue(c.get(0))); Assert.assertArrayEquals(ROW, CellUtil.cloneRow(c.get(0))); Assert.assertEquals(HConstants.LATEST_TIMESTAMP, p.getTimestamp()); - Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); + Assert.assertEquals(0, + 
CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue((ExtendedCell) c.get(0)))); p = new Put(ByteBuffer.wrap(ROW), 1970L); p.addColumn(FAMILY, ByteBuffer.wrap(QUALIFIER), 2001L, null); @@ -485,7 +489,8 @@ public void testPutCreationWithByteBuffer() { Assert.assertArrayEquals(new byte[] {}, CellUtil.cloneValue(c.get(0))); Assert.assertArrayEquals(ROW, CellUtil.cloneRow(c.get(0))); Assert.assertEquals(1970L, p.getTimestamp()); - Assert.assertEquals(0, CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue(c.get(0)))); + Assert.assertEquals(0, + CellComparatorImpl.COMPARATOR.compare(c.get(0), new KeyValue((ExtendedCell) c.get(0)))); } @Test diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java index acc561812853..ee0a711634de 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java @@ -501,8 +501,7 @@ public void testRegionLockInfo() { */ @Test public void testCellConversionWithTags() { - - Cell cell = getCellWithTags(); + ExtendedCell cell = getCellWithTags(); CellProtos.Cell protoCell = ProtobufUtil.toCell(cell, true); assertNotNull(protoCell); @@ -514,7 +513,7 @@ public void testCellConversionWithTags() { assertEquals(TAG_STR, Tag.getValueAsString(decodedTag)); } - private Cell getCellWithTags() { + private ExtendedCell getCellWithTags() { Tag tag = new ArrayBackedTag(TAG_TYPE, TAG_STR); ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY); cellBuilder.setRow(Bytes.toBytes("row1")); @@ -539,7 +538,7 @@ private ExtendedCell getCellFromProtoResult(CellProtos.Cell protoCell, boolean d */ @Test public void testCellConversionWithoutTags() { - Cell cell = getCellWithTags(); + ExtendedCell cell = getCellWithTags(); CellProtos.Cell protoCell = ProtobufUtil.toCell(cell, false); assertNotNull(protoCell); @@ -555,7 +554,7 @@ public void testCellConversionWithoutTags() { */ @Test public void testTagEncodeFalseDecodeTrue() { - Cell cell = getCellWithTags(); + ExtendedCell cell = getCellWithTags(); CellProtos.Cell protoCell = ProtobufUtil.toCell(cell, false); assertNotNull(protoCell); @@ -571,7 +570,7 @@ public void testTagEncodeFalseDecodeTrue() { */ @Test public void testTagEncodeTrueDecodeFalse() { - Cell cell = getCellWithTags(); + ExtendedCell cell = getCellWithTags(); CellProtos.Cell protoCell = ProtobufUtil.toCell(cell, true); assertNotNull(protoCell); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java index 027451956ee7..1cbc6cb7497a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java @@ -114,26 +114,15 @@ public interface Cell extends HeapSize { // 5) Type /** - * Return the byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc - * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Use {@link #getType()}. - */ - @Deprecated - byte getTypeByte(); - - // 6) SequenceId - - /** - * A region-specific unique monotonically increasing sequence ID given to each Cell. It always - * exists for cells in the memstore but is not retained forever. 
It will be kept for - * {@link HConstants#KEEP_SEQID_PERIOD} days, but generally becomes irrelevant after the cell's - * row is no longer involved in any operations that require strict consistency. - * @return seqId (always > 0 if exists), or 0 if it no longer exists - * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. + * Returns the type of cell in a human readable format using {@link Type}. + *
<p>
+ * Note: This does not expose the internal types of Cells like {@link KeyValue.Type#Maximum} and + * {@link KeyValue.Type#Minimum} + * @return The data type of this cell: one of Put, Delete, etc */ - @Deprecated - long getSequenceId(); + Type getType(); - // 7) Value + // 6) Value /** * Contiguous raw bytes that may start at any index in the containing array. Max length is @@ -151,48 +140,6 @@ public interface Cell extends HeapSize { /** Returns Serialized size (defaults to include tag length if has some tags). */ int getSerializedSize(); - /** - * Contiguous raw bytes representing tags that may start at any index in the containing array. - * @return the tags byte array - * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are are now internal. - */ - @Deprecated - byte[] getTagsArray(); - - /** - * Return the first offset where the tags start in the Cell - * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are are now internal. - */ - @Deprecated - int getTagsOffset(); - - /** - * HBase internally uses 2 bytes to store tags length in Cell. As the tags length is always a - * non-negative number, to make good use of the sign bit, the max of tags length is defined 2 * - * Short.MAX_VALUE + 1 = 65535. As a result, the return type is int, because a short is not - * capable of handling that. Please note that even if the return type is int, the max tags length - * is far less than Integer.MAX_VALUE. - * @return the total length of the tags in the Cell. - * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are are now internal. - */ - @Deprecated - int getTagsLength(); - - /** - * Returns the type of cell in a human readable format using {@link Type}. Note : This does not - * expose the internal types of Cells like {@link KeyValue.Type#Maximum} and - * {@link KeyValue.Type#Minimum} - * @return The data type this cell: one of Put, Delete, etc - */ - default Type getType() { - byte byteType = getTypeByte(); - Type t = Type.CODE_ARRAY[byteType & 0xff]; - if (t != null) { - return t; - } - throw new UnsupportedOperationException("Invalid type of cell " + byteType); - } /** * The valid types for user to build the cell. Currently, This is subset of {@link KeyValue.Type}. */ @@ -216,13 +163,5 @@ enum Type { public byte getCode() { return this.code; } - - private static final Type[] CODE_ARRAY = new Type[256]; - - static { - for (Type t : Type.values()) { - CODE_ARRAY[t.code & 0xff] = t; - } - } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java index 0cd0905cc3a6..0e6a53ca7c47 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparatorImpl.java @@ -96,8 +96,12 @@ public int compare(final Cell l, final Cell r, boolean ignoreSequenceid) { return diff; } } + + if (ignoreSequenceid) { + return diff; + } // Negate following comparisons so later edits show up first mvccVersion: later sorts first - return ignoreSequenceid ?
diff : Long.compare(r.getSequenceId(), l.getSequenceId()); + return Long.compare(PrivateCellUtil.getSequenceId(r), PrivateCellUtil.getSequenceId(l)); } private int compareKeyValues(final KeyValue left, final KeyValue right) { @@ -720,11 +724,13 @@ public final int compareWithoutRow(final Cell left, final Cell right) { int rFamLength = right.getFamilyLength(); int lQualLength = left.getQualifierLength(); int rQualLength = right.getQualifierLength(); - if (lFamLength + lQualLength == 0 && left.getTypeByte() == KeyValue.Type.Minimum.getCode()) { + byte leftType = PrivateCellUtil.getTypeByte(left); + byte rightType = PrivateCellUtil.getTypeByte(right); + if (lFamLength + lQualLength == 0 && leftType == KeyValue.Type.Minimum.getCode()) { // left is "bigger", i.e. it appears later in the sorted order return 1; } - if (rFamLength + rQualLength == 0 && right.getTypeByte() == KeyValue.Type.Minimum.getCode()) { + if (rFamLength + rQualLength == 0 && rightType == KeyValue.Type.Minimum.getCode()) { return -1; } if (lFamLength != rFamLength) { @@ -746,7 +752,7 @@ public final int compareWithoutRow(final Cell left, final Cell right) { // of higher numbers sort before those of lesser numbers. Maximum (255) // appears ahead of everything, and minimum (0) appears after // everything. - return (0xff & right.getTypeByte()) - (0xff & left.getTypeByte()); + return (0xff & rightType) - (0xff & leftType); } @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index 10213b143632..3ee3a5159c47 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -570,15 +570,13 @@ public static boolean matchingTags(final Cell left, final Cell right) { * Return true if a delete type, a {@link KeyValue.Type#Delete} or a {KeyValue.Type#DeleteFamily} * or a {@link KeyValue.Type#DeleteColumn} KeyValue type. */ - @SuppressWarnings("deprecation") public static boolean isDelete(final Cell cell) { - return PrivateCellUtil.isDelete(cell.getTypeByte()); + return PrivateCellUtil.isDelete(PrivateCellUtil.getTypeByte(cell)); } /** Returns True if this cell is a Put. 
*/ - @SuppressWarnings("deprecation") public static boolean isPut(Cell cell) { - return cell.getTypeByte() == KeyValue.Type.Put.getCode(); + return PrivateCellUtil.getTypeByte(cell) == KeyValue.Type.Put.getCode(); } /** @@ -629,13 +627,21 @@ public static String getCellKeyAsString(Cell cell, Function<byte[], String> rowCon sb.append('/'); sb.append(KeyValue.humanReadableTimestamp(cell.getTimestamp())); sb.append('/'); - sb.append(KeyValue.Type.codeToType(cell.getTypeByte())); + if (cell instanceof ExtendedCell) { + sb.append(KeyValue.Type.codeToType(((ExtendedCell) cell).getTypeByte())); + } else { + sb.append(cell.getType()); + } + if (!(cell instanceof KeyValue.KeyOnlyKeyValue)) { sb.append("/vlen="); sb.append(cell.getValueLength()); } - sb.append("/seqid="); - sb.append(cell.getSequenceId()); + if (cell instanceof ExtendedCell) { + sb.append("/seqid="); + sb.append(((ExtendedCell) cell).getSequenceId()); + } + return sb.toString(); } @@ -651,8 +657,12 @@ public static String toString(Cell cell, boolean verbose) { String value = null; if (verbose) { // TODO: pretty print tags as well - if (cell.getTagsLength() > 0) { - tag = Bytes.toStringBinary(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); + if (cell instanceof RawCell) { + RawCell rawCell = (RawCell) cell; + if (rawCell.getTagsLength() > 0) { + tag = Bytes.toStringBinary(rawCell.getTagsArray(), rawCell.getTagsOffset(), + rawCell.getTagsLength()); + } } if (!(cell instanceof KeyValue.KeyOnlyKeyValue)) { value = @@ -675,7 +685,8 @@ public static String toString(Cell cell, boolean verbose) { public static boolean equals(Cell a, Cell b) { return matchingRows(a, b) && matchingFamily(a, b) && matchingQualifier(a, b) - && matchingTimestamp(a, b) && a.getTypeByte() == b.getTypeByte(); + && matchingTimestamp(a, b) + && PrivateCellUtil.getTypeByte(a) == PrivateCellUtil.getTypeByte(b); } public static boolean matchingTimestamp(Cell a, Cell b) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java index 28e648ec466e..f3bf6624aaff 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java @@ -168,4 +168,15 @@ default int getChunkId() { /** Returns The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc */ byte getTypeByte(); + + /** + * On the server side, prefer {@link #getTypeByte()}: this method does not expose the + * {@code Maximum} and {@code Minimum} types because they are never returned to clients, but such + * cells do exist on the server side, so calling this method on one of them will throw an + * exception.
+ */ + @Override + default Type getType() { + return PrivateCellUtil.code2Type(getTypeByte()); + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index a87a5214fadf..106875a59dde 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -673,7 +673,7 @@ public KeyValue(byte[] row, int roffset, int rlength, byte[] family, int foffset this.offset = 0; } - public KeyValue(Cell c) { + public KeyValue(ExtendedCell c) { this(c.getRowArray(), c.getRowOffset(), c.getRowLength(), c.getFamilyArray(), c.getFamilyOffset(), c.getFamilyLength(), c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength(), c.getTimestamp(), Type.codeToType(c.getTypeByte()), c.getValueArray(), @@ -992,7 +992,7 @@ public int hashCode() { return calculateHashForKey(this); } - private int calculateHashForKey(Cell cell) { + private int calculateHashForKey(ExtendedCell cell) { // pre-calculate the 3 hashes made of byte ranges int rowHash = Bytes.hashCode(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()); int familyHash = diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java index 6c8f2e6e4edb..216e7410d4a4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java @@ -22,11 +22,9 @@ import java.io.EOFException; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.IOUtils; @@ -708,7 +706,6 @@ public static KeyValue create(final DataInput in) throws IOException { * useful marking a stream as done. 
*/ public static KeyValue create(int length, final DataInput in) throws IOException { - if (length <= 0) { if (length == 0) return null; throw new IOException("Failed read " + length + " bytes, stream corrupt?"); @@ -720,60 +717,4 @@ public static KeyValue create(int length, final DataInput in) throws IOException in.readFully(bytes); return new KeyValue(bytes, 0, length); } - - public static int getSerializedSize(Cell cell, boolean withTags) { - if (withTags) { - return cell.getSerializedSize(); - } - if (cell instanceof ExtendedCell) { - return ((ExtendedCell) cell).getSerializedSize(withTags); - } - return length(cell.getRowLength(), cell.getFamilyLength(), cell.getQualifierLength(), - cell.getValueLength(), cell.getTagsLength(), withTags); - } - - public static int oswrite(final Cell cell, final OutputStream out, final boolean withTags) - throws IOException { - if (cell instanceof ExtendedCell) { - return ((ExtendedCell) cell).write(out, withTags); - } else { - short rlen = cell.getRowLength(); - byte flen = cell.getFamilyLength(); - int qlen = cell.getQualifierLength(); - int vlen = cell.getValueLength(); - int tlen = cell.getTagsLength(); - // write key length - int klen = keyLength(rlen, flen, qlen); - ByteBufferUtils.putInt(out, klen); - // write value length - ByteBufferUtils.putInt(out, vlen); - // Write rowkey - 2 bytes rk length followed by rowkey bytes - StreamUtils.writeShort(out, rlen); - out.write(cell.getRowArray(), cell.getRowOffset(), rlen); - // Write cf - 1 byte of cf length followed by the family bytes - out.write(flen); - out.write(cell.getFamilyArray(), cell.getFamilyOffset(), flen); - // write qualifier - out.write(cell.getQualifierArray(), cell.getQualifierOffset(), qlen); - // write timestamp - StreamUtils.writeLong(out, cell.getTimestamp()); - // write the type - out.write(cell.getTypeByte()); - // write value - out.write(cell.getValueArray(), cell.getValueOffset(), vlen); - int size = klen + vlen + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE; - // write tags if we have to - if (withTags && tlen > 0) { - // 2 bytes tags length followed by tags bytes - // tags length is serialized with 2 bytes only(short way) even if the - // type is int. As this - // is non -ve numbers, we save the sign bit. See HBASE-11437 - out.write((byte) (0xff & (tlen >> 8))); - out.write((byte) (0xff & tlen)); - out.write(cell.getTagsArray(), cell.getTagsOffset(), tlen); - size += tlen + KeyValue.TAGS_LENGTH_SIZE; - } - return size; - } - } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/MetaCellComparator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/MetaCellComparator.java index 43d9e3ee9d7f..2a7d86ded111 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/MetaCellComparator.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/MetaCellComparator.java @@ -24,8 +24,6 @@ import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceStability; -import org.apache.hbase.thirdparty.com.google.common.primitives.Longs; - /** * A {@link CellComparatorImpl} for hbase:meta catalog table {@link KeyValue}s. */ @@ -92,8 +90,11 @@ public int compare(final Cell a, final Cell b, boolean ignoreSequenceid) { return diff; } + if (ignoreSequenceid) { + return diff; + } // Negate following comparisons so later edits show up first mvccVersion: later sorts first - return ignoreSequenceid ? 
diff : Longs.compare(b.getSequenceId(), a.getSequenceId()); + return Long.compare(PrivateCellUtil.getSequenceId(b), PrivateCellUtil.getSequenceId(a)); } @FunctionalInterface diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java index bf514d81c5a2..95c46fb7b9e6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java @@ -32,6 +32,7 @@ import java.util.Map.Entry; import java.util.NavigableMap; import java.util.Optional; +import org.apache.hadoop.hbase.Cell.Type; import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.io.TagCompressionContext; import org.apache.hadoop.hbase.io.util.Dictionary; @@ -3063,4 +3064,36 @@ public boolean advance() { } }; } + + private static final Cell.Type[] CELL_TYPE_CODE_ARRAY = new Cell.Type[256]; + + static { + for (Type t : Type.values()) { + CELL_TYPE_CODE_ARRAY[t.getCode() & 0xff] = t; + } + } + + public static Cell.Type code2Type(byte code) { + Type t = CELL_TYPE_CODE_ARRAY[code & 0xff]; + if (t != null) { + return t; + } + throw new UnsupportedOperationException("Invalid type of cell " + code); + } + + public static byte getTypeByte(Cell c) { + if (c instanceof ExtendedCell) { + return ((ExtendedCell) c).getTypeByte(); + } else { + return c.getType().getCode(); + } + } + + public static long getSequenceId(Cell c) { + if (c instanceof ExtendedCell) { + return ((ExtendedCell) c).getSequenceId(); + } else { + return HConstants.NO_SEQNUM; + } + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java index 5ba344770a3d..3b638f28f72b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java @@ -33,6 +33,27 @@ public interface RawCell extends Cell { static final int MAX_TAGS_LENGTH = (2 * Short.MAX_VALUE) + 1; + /** + * Contiguous raw bytes representing tags that may start at any index in the containing array. + * @return the tags byte array + */ + byte[] getTagsArray(); + + /** + * Return the first offset where the tags start in the Cell. + */ + int getTagsOffset(); + + /** + * HBase internally uses 2 bytes to store tags length in Cell. As the tags length is always a + * non-negative number, to make good use of the sign bit, the max of tags length is defined as 2 * + * Short.MAX_VALUE + 1 = 65535. As a result, the return type is int, because a short is not + * capable of handling that. Please note that even if the return type is int, the max tags length + * is far less than Integer.MAX_VALUE. + * @return the total length of the tags in the Cell.
+ */ + int getTagsLength(); + /** * Allows cloning the tags in the cell to a new byte[] * @return the byte[] having the tags diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java index ef40b395b7b1..68fd8f420ed7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java @@ -58,8 +58,8 @@ public KeyValueEncoder(final OutputStream out) { public void write(ExtendedCell cell) throws IOException { checkFlushed(); // Do not write tags over RPC - ByteBufferUtils.putInt(this.out, KeyValueUtil.getSerializedSize(cell, false)); - KeyValueUtil.oswrite(cell, out, false); + ByteBufferUtils.putInt(this.out, cell.getSerializedSize(false)); + cell.write(out, false); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java index 655bc4c5f261..fc2e63693025 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java @@ -61,8 +61,8 @@ public KeyValueEncoder(final OutputStream out) { public void write(ExtendedCell cell) throws IOException { checkFlushed(); // Write tags - ByteBufferUtils.putInt(this.out, KeyValueUtil.getSerializedSize(cell, true)); - KeyValueUtil.oswrite(cell, out, true); + ByteBufferUtils.putInt(this.out, cell.getSerializedSize(true)); + cell.write(out, true); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java index 7fb4fd9685e9..f0f63d23c51f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java @@ -21,7 +21,6 @@ import java.io.IOException; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.io.WritableUtils; import org.apache.yetus.audience.InterfaceAudience; @@ -41,7 +40,7 @@ public int write(ExtendedCell cell) throws IOException { // We write tags seperately because though there is no tag in KV // if the hfilecontext says include tags we need the tags length to be // written - int size = KeyValueUtil.oswrite(cell, out, false); + int size = cell.write(out, false); // Write the additional tag into the stream if (encodingCtx.getHFileContext().isIncludesTags()) { int tagsLength = cell.getTagsLength(); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java index 15ccf3a04554..7860b62dfa70 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java @@ -79,7 +79,7 @@ public void testExtendedCellBuilderWithShallowCopy() { byte[] value = new byte[] { OLD_DATA }; byte[] tags = new byte[] { OLD_DATA }; long seqId = 999; - Cell cell = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(row) + ExtendedCell cell = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(row) 
.setFamily(family).setQualifier(qualifier).setType(KeyValue.Type.Put.getCode()) .setValue(value).setTags(tags).setSequenceId(seqId).build(); row[0] = NEW_DATA; @@ -103,7 +103,7 @@ public void testExtendedCellBuilderWithDeepCopy() { byte[] value = new byte[] { OLD_DATA }; byte[] tags = new byte[] { OLD_DATA }; long seqId = 999; - Cell cell = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row) + ExtendedCell cell = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row) .setFamily(family).setQualifier(qualifier).setType(KeyValue.Type.Put.getCode()) .setValue(value).setTags(tags).setSequenceId(seqId).build(); row[0] = NEW_DATA; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java index 20c279366c36..0a1cf713fea9 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java @@ -152,10 +152,10 @@ public void testNullFamilyQualifierValueTags() { byte[] value = null; byte[] tags = null; - Cell ic1 = + ExtendedCell ic1 = new IndividualBytesFieldCell(row, family, qualifier, timestamp, type, seqId, value, tags); - Cell kv1 = new KeyValue(row, family, qualifier, timestamp, type, value, tags); + ExtendedCell kv1 = new KeyValue(row, family, qualifier, timestamp, type, value, tags); byte[] familyArrayInKV = Bytes.copy(kv1.getFamilyArray(), kv1.getFamilyOffset(), kv1.getFamilyLength()); byte[] qualifierArrayInKV = diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index d345dce15ac1..1644a6f1fce7 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -578,8 +578,8 @@ public void testKeyValueSerialization() throws Exception { ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); for (KeyValue kv : keyValues) { DataOutputStream os = new DataOutputStream(byteArrayOutputStream); - ByteBufferUtils.putInt(os, KeyValueUtil.getSerializedSize(kv, true)); - KeyValueUtil.oswrite(kv, os, true); + ByteBufferUtils.putInt(os, kv.getSerializedSize(true)); + kv.write(os, true); } DataInputStream is = new DataInputStream(new ByteArrayInputStream(byteArrayOutputStream.toByteArray())); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java index 888e285f340e..99b1dd112b98 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java @@ -32,6 +32,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; @@ -49,6 +50,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.MapReduceExtendedCell; import org.apache.hadoop.hbase.wal.WALEdit; +import org.apache.hadoop.hbase.wal.WALEditInternalHelper; import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.mapreduce.Job; import 
org.apache.hadoop.mapreduce.Mapper; @@ -167,8 +169,8 @@ public void map(WALKey key, WALEdit value, Context context) throws IOException { ImmutableBytesWritable tableOut = new ImmutableBytesWritable(targetTable.getName()); Put put = null; Delete del = null; - Cell lastCell = null; - for (Cell cell : value.getCells()) { + ExtendedCell lastCell = null; + for (ExtendedCell cell : WALEditInternalHelper.getExtendedCells(value)) { context.getCounter(Counter.CELLS_READ).increment(1); // Filtering WAL meta marker entries. if (WALEdit.isMetaEditFamily(cell)) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index ce9cab6bf3b2..8082a2db69c8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -2216,7 +2216,7 @@ public ReplicateWALEntryResponse replicateWALEntry(final RpcController controlle requestCount.increment(); List entries = request.getEntryList(); checkShouldRejectReplicationRequest(entries); - CellScanner cellScanner = getAndReset(controller); + ExtendedCellScanner cellScanner = getAndReset(controller); server.getRegionServerCoprocessorHost().preReplicateLogEntries(); server.getReplicationSinkService().replicateLogEntries(entries, cellScanner, request.getReplicationClusterId(), request.getSourceBaseNamespaceDirPath(), diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java index 5f893efd88f3..c5b853a23bb3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java @@ -19,7 +19,7 @@ import java.io.IOException; import java.util.List; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry; @@ -40,6 +40,7 @@ public interface ReplicationSinkService extends ReplicationService { * directory required for replicating hfiles * @param sourceHFileArchiveDirPath Path that point to the source cluster hfile archive directory */ - void replicateLogEntries(List entries, CellScanner cells, String replicationClusterId, - String sourceBaseNamespaceDirPath, String sourceHFileArchiveDirPath) throws IOException; + void replicateLogEntries(List entries, ExtendedCellScanner cells, + String replicationClusterId, String sourceBaseNamespaceDirPath, + String sourceHFileArchiveDirPath) throws IOException; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java index c6e1dfe01718..dff9ac0efe5b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java @@ -237,7 +237,8 @@ boolean passesBloomFilter(Scan scan, final SortedSet columns) { if (columns != null && columns.size() == 1) { byte[] column = columns.first(); // create the required fake key - Cell kvKey = PrivateCellUtil.createFirstOnRow(row, HConstants.EMPTY_BYTE_ARRAY, column); + ExtendedCell kvKey = 
+ PrivateCellUtil.createFirstOnRow(row, HConstants.EMPTY_BYTE_ARRAY, column); return passesGeneralRowColBloomFilter(kvKey); } @@ -307,14 +308,14 @@ private boolean passesGeneralRowBloomFilter(byte[] row, int rowOffset, int rowLe * multi-column query. the cell to check if present in BloomFilter * @return True if passes */ - public boolean passesGeneralRowColBloomFilter(Cell cell) { + public boolean passesGeneralRowColBloomFilter(ExtendedCell cell) { BloomFilter bloomFilter = this.generalBloomFilter; if (bloomFilter == null) { bloomFilterMetrics.incrementEligible(); return true; } // Used in ROW_COL bloom - Cell kvKey = null; + ExtendedCell kvKey = null; // Already if the incoming key is a fake rowcol key then use it as it is if (cell.getTypeByte() == KeyValue.Type.Maximum.getCode() && cell.getFamilyLength() == 0) { kvKey = cell; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java index 87154a62066c..8645f6054f89 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java @@ -397,8 +397,8 @@ public EnsureKvEncoder(OutputStream out) { public void write(ExtendedCell cell) throws IOException { checkFlushed(); // Make sure to write tags into WAL - ByteBufferUtils.putInt(this.out, KeyValueUtil.getSerializedSize(cell, true)); - KeyValueUtil.oswrite(cell, this.out, true); + ByteBufferUtils.putInt(this.out, cell.getSerializedSize(true)); + cell.write(out, true); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java index c06c6d19a654..9e65ae7c364a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java @@ -21,9 +21,9 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.wal.WALEdit; @@ -49,7 +49,7 @@ public class BulkLoadCellFilter { * @param famPredicate Returns true of given family should be removed. * @return The filtered cell. 
*/ - public Cell filterCell(Cell cell, Predicate famPredicate) { + public ExtendedCell filterCell(ExtendedCell cell, Predicate famPredicate) { byte[] fam; BulkLoadDescriptor bld = null; try { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java index 82ac9ebd1f32..0f4e62ec9ad9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java @@ -19,6 +19,8 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.wal.WALEdit; @@ -49,11 +51,13 @@ public Entry filter(Entry entry) { @Override public Cell filterCell(final Entry entry, Cell cell) { + ExtendedCell extendedCell = PrivateCellUtil.ensureExtendedCell(cell); ReplicationPeerConfig peerConfig = this.peer.getPeerConfig(); TableName tableName = entry.getKey().getTableName(); if (CellUtil.matchingColumn(cell, WALEdit.METAFAMILY, WALEdit.BULK_LOAD)) { // If the cell is about BULKLOAD event, unpack and filter it by BulkLoadCellFilter. - return bulkLoadFilter.filterCell(cell, fam -> !peerConfig.needToReplicate(tableName, fam)); + return bulkLoadFilter.filterCell(extendedCell, + fam -> !peerConfig.needToReplicate(tableName, fam)); } else { return peerConfig.needToReplicate(tableName, CellUtil.cloneFamily(cell)) ? cell : null; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationSinkServiceImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationSinkServiceImpl.java index c8141b683406..acaf5756879f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationSinkServiceImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationSinkServiceImpl.java @@ -24,7 +24,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.ScheduledChore; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.Stoppable; @@ -56,7 +56,7 @@ public class ReplicationSinkServiceImpl implements ReplicationSinkService { private int statsPeriodInSecond; @Override - public void replicateLogEntries(List entries, CellScanner cells, + public void replicateLogEntries(List entries, ExtendedCellScanner cells, String replicationClusterId, String sourceBaseNamespaceDirPath, String sourceHFileArchiveDirPath) throws IOException { this.replicationSink.replicateEntries(entries, cells, replicationClusterId, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java index 6dc41bcc014a..897e06f4a9fd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java @@ -20,7 +20,9 @@ import java.util.NavigableMap; import org.apache.hadoop.hbase.Cell; import 
org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.yetus.audience.InterfaceAudience; @@ -50,13 +52,14 @@ private boolean hasGlobalScope(NavigableMap scopes, byte[] fami @Override public Cell filterCell(Entry entry, Cell cell) { + ExtendedCell extendedCell = PrivateCellUtil.ensureExtendedCell(cell); NavigableMap scopes = entry.getKey().getReplicationScopes(); if (scopes == null || scopes.isEmpty()) { return null; } byte[] family = CellUtil.cloneFamily(cell); if (CellUtil.matchingColumn(cell, WALEdit.METAFAMILY, WALEdit.BULK_LOAD)) { - return bulkLoadFilter.filterCell(cell, new Predicate() { + return bulkLoadFilter.filterCell(extendedCell, new Predicate() { @Override public boolean apply(byte[] family) { return !hasGlobalScope(scopes, family); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java index 8610a6d43bd7..ff8adfceec0b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java @@ -44,8 +44,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCell; +import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -192,7 +193,7 @@ private void decorateConf() { * @param sourceHFileArchiveDirPath Path that point to the source cluster hfile archive directory * @throws IOException If failed to replicate the data */ - public void replicateEntries(List entries, final CellScanner cells, + public void replicateEntries(List entries, final ExtendedCellScanner cells, String replicationClusterId, String sourceBaseNamespaceDirPath, String sourceHFileArchiveDirPath) throws IOException { if (entries.isEmpty()) { @@ -225,7 +226,7 @@ public void replicateEntries(List entries, final CellScanner cells, continue; } } - Cell previousCell = null; + ExtendedCell previousCell = null; Mutation mutation = null; int count = entry.getAssociatedCellCount(); for (int i = 0; i < count; i++) { @@ -234,7 +235,7 @@ public void replicateEntries(List entries, final CellScanner cells, this.metrics.incrementFailedBatches(); throw new ArrayIndexOutOfBoundsException("Expected=" + count + ", index=" + i); } - Cell cell = cells.current(); + ExtendedCell cell = cells.current(); // Handle bulk load hfiles replication if (CellUtil.matchingQualifier(cell, WALEdit.BULK_LOAD)) { BulkLoadDescriptor bld = WALEdit.getBulkLoadDescriptor(cell); @@ -430,7 +431,7 @@ private String getHFilePath(TableName table, BulkLoadDescriptor bld, String stor } /** Returns True if we have crossed over onto a new row or type */ - private boolean isNewRowOrType(final Cell previousCell, final Cell cell) { + private boolean isNewRowOrType(final ExtendedCell previousCell, final ExtendedCell cell) { return previousCell == null || previousCell.getTypeByte() != cell.getTypeByte() || 
!CellUtil.matchingRows(previousCell, cell); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index f0158f299f22..9ccf3c85d611 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -563,7 +563,7 @@ private boolean checkCoveringPermission(User user, OpType request, RegionCoproce (col.getQualifierLength() == 0 && request == OpType.DELETE) || CellUtil.matchingQualifier(cell, col) ) { - byte type = col.getTypeByte(); + byte type = PrivateCellUtil.getTypeByte(col); if (considerCellTs) { curColCheckTs = col.getTimestamp(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitUtil.java index fab9936165d1..626cefad3f78 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitUtil.java @@ -35,7 +35,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellScanner; @@ -490,7 +489,7 @@ public static List getMutationsFromWALEntry(AdminProtos.WALEntry : entry.getKey().getLogSequenceNumber(); int count = entry.getAssociatedCellCount(); List mutations = new ArrayList<>(); - Cell previousCell = null; + ExtendedCell previousCell = null; Mutation m = null; WALKeyImpl key = null; WALEdit val = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java index ac9d1fd1fa8d..53c7bca15af9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java @@ -892,7 +892,7 @@ public void testGetShortMidpoint() { long ts = 5; KeyValue kv1 = new KeyValue(Bytes.toBytes("the quick brown fox"), family, qualA, ts, Type.Put); KeyValue kv2 = new KeyValue(Bytes.toBytes("the who test text"), family, qualA, ts, Type.Put); - Cell newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2); + ExtendedCell newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2); assertTrue(keyComparator.compare(kv1, newKey) < 0); assertTrue((keyComparator.compare(kv2, newKey)) > 0); byte[] expectedArray = Bytes.toBytes("the r"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplication.java index 7ab7578df1c2..d9aed0e6871d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplication.java @@ -36,9 +36,9 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellBuilder; -import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCellBuilder; +import 
org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -278,7 +278,7 @@ protected void assertTableNoValue(Table table, byte[] row, byte[] value) throws private String createHFileForFamilies(byte[] row, byte[] value, Configuration clusterConfig) throws IOException { - CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.DEEP_COPY); + ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY); cellBuilder.setRow(row).setFamily(TestReplicationBase.famName).setQualifier(Bytes.toBytes("1")) .setValue(value).setType(Cell.Type.Put); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplicationHFileRefs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplicationHFileRefs.java index bfc80232792f..067b3c45e162 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplicationHFileRefs.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoadReplicationHFileRefs.java @@ -32,9 +32,9 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellBuilder; -import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCellBuilder; +import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -256,7 +256,7 @@ protected void bulkLoadOnCluster(TableName tableName, byte[] family) throws Exce } private String createHFileForFamilies(byte[] family) throws IOException { - CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.DEEP_COPY); + ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY); cellBuilder.setRow(row).setFamily(family).setQualifier(qualifier).setValue(value) .setType(Cell.Type.Put); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 99c811e720c0..d923327f5b72 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -106,6 +106,7 @@ import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.CheckAndMutate; import org.apache.hadoop.hbase.client.CheckAndMutateResult; +import org.apache.hadoop.hbase.client.ClientInternalHelper; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Delete; @@ -4839,9 +4840,9 @@ public void doAnAction() throws Exception { } assertTrue(timestamp >= prevTimestamp); prevTimestamp = timestamp; - Cell previousKV = null; + ExtendedCell previousKV = null; - for (Cell kv : result.rawCells()) { + for (ExtendedCell kv : ClientInternalHelper.getExtendedRawCells(result)) { byte[] thisValue = CellUtil.cloneValue(kv); if (previousKV != null) { if (Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue) != 0) { diff --git 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreSegmentsIterator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreSegmentsIterator.java
index 6f5ef2c10257..e64b4cc60471 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreSegmentsIterator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreSegmentsIterator.java
@@ -25,8 +25,8 @@
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.HConstants;
@@ -135,12 +135,12 @@ protected void closeTestSegments(List segments) {
   protected void verifyNext(MemStoreSegmentsIterator iterator) {
     // check first cell
     assertTrue(iterator.hasNext());
-    Cell firstCell = iterator.next();
+    ExtendedCell firstCell = iterator.next();
     assertEquals(LESS_THAN_INTEGER_MAX_VALUE_SEQ_ID, firstCell.getSequenceId());

     // check second cell
     assertTrue(iterator.hasNext());
-    Cell secondCell = iterator.next();
+    ExtendedCell secondCell = iterator.next();
     assertEquals(GREATER_THAN_INTEGER_MAX_VALUE_SEQ_ID, secondCell.getSequenceId());
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
index b04a0054276c..48d42ae18276 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
@@ -311,9 +311,9 @@ public void testFlushAndCompactionWithoutTags() throws Exception {
       try {
         Result[] next = scanner.next(3);
         for (Result result : next) {
-          CellScanner cellScanner = result.cellScanner();
+          ExtendedCellScanner cellScanner = result.cellScanner();
           cellScanner.advance();
-          Cell current = cellScanner.current();
+          ExtendedCell current = cellScanner.current();
           assertEquals(0, current.getTagsLength());
         }
       } finally {
@@ -328,9 +328,9 @@ public void testFlushAndCompactionWithoutTags() throws Exception {
       try {
         Result[] next = scanner.next(3);
         for (Result result : next) {
-          CellScanner cellScanner = result.cellScanner();
+          ExtendedCellScanner cellScanner = result.cellScanner();
           cellScanner.advance();
-          Cell current = cellScanner.current();
+          ExtendedCell current = cellScanner.current();
           assertEquals(0, current.getTagsLength());
         }
       } finally {
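
TestTags relies on Result#cellScanner() now being typed as ExtendedCellScanner, since getTagsLength() lives on ExtendedCell rather than the public Cell. A hedged sketch of that loop in isolation (the totalTagsLength helper is an assumption for illustration; the Result is taken to come from a scan as in the test):

    import java.io.IOException;
    import org.apache.hadoop.hbase.ExtendedCell;
    import org.apache.hadoop.hbase.ExtendedCellScanner;
    import org.apache.hadoop.hbase.client.Result;

    // Sum tag lengths across all cells of a Result via the internal scanner type.
    static int totalTagsLength(Result result) throws IOException {
      int total = 0;
      ExtendedCellScanner scanner = result.cellScanner();
      while (scanner.advance()) {
        ExtendedCell cell = scanner.current();
        total += cell.getTagsLength();
      }
      return total;
    }
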
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
index dc634632c946..bdb51ebe36f8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
@@ -35,12 +35,12 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
@@ -62,6 +62,7 @@
 import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.HFileTestUtil;
+import org.apache.hadoop.hbase.wal.WALEditInternalHelper;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
@@ -165,11 +166,11 @@ public void setUp() throws Exception {
   @Test
   public void testBatchSink() throws Exception {
     List<WALEntry> entries = new ArrayList<>(BATCH_SIZE);
-    List<Cell> cells = new ArrayList<>();
+    List<ExtendedCell> cells = new ArrayList<>();
     for (int i = 0; i < BATCH_SIZE; i++) {
       entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
     }
-    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
+    SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells.iterator()),
       replicationClusterId, baseNamespaceDir, hfileArchiveDir);
     Scan scan = new Scan();
     ResultScanner scanRes = table1.getScanner(scan);
@@ -182,12 +183,12 @@ public void testBatchSink() throws Exception {
   @Test
   public void testMixedPutDelete() throws Exception {
     List<WALEntry> entries = new ArrayList<>(BATCH_SIZE / 2);
-    List<Cell> cells = new ArrayList<>();
+    List<ExtendedCell> cells = new ArrayList<>();
     for (int i = 0; i < BATCH_SIZE / 2; i++) {
       entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
     }
-    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells), replicationClusterId,
-      baseNamespaceDir, hfileArchiveDir);
+    SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells),
+      replicationClusterId, baseNamespaceDir, hfileArchiveDir);

     entries = new ArrayList<>(BATCH_SIZE);
     cells = new ArrayList<>();
@@ -196,7 +197,7 @@ public void testMixedPutDelete() throws Exception {
         i % 2 != 0 ? KeyValue.Type.Put : KeyValue.Type.DeleteColumn, cells));
     }
-    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
+    SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells.iterator()),
       replicationClusterId, baseNamespaceDir, hfileArchiveDir);
     Scan scan = new Scan();
     ResultScanner scanRes = table1.getScanner(scan);
@@ -206,12 +207,12 @@ public void testMixedPutDelete() throws Exception {
   @Test
   public void testLargeEditsPutDelete() throws Exception {
     List<WALEntry> entries = new ArrayList<>();
-    List<Cell> cells = new ArrayList<>();
+    List<ExtendedCell> cells = new ArrayList<>();
     for (int i = 0; i < 5510; i++) {
       entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
     }
-    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells), replicationClusterId,
-      baseNamespaceDir, hfileArchiveDir);
+    SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells),
+      replicationClusterId, baseNamespaceDir, hfileArchiveDir);

     ResultScanner resultScanner = table1.getScanner(new Scan());
     int totalRows = 0;
@@ -226,8 +227,8 @@ public void testLargeEditsPutDelete() throws Exception {
       entries.add(createEntry(TABLE_NAME1, i,
        i % 2 != 0 ? KeyValue.Type.Put : KeyValue.Type.DeleteColumn, cells));
     }
-    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells), replicationClusterId,
-      baseNamespaceDir, hfileArchiveDir);
+    SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells),
+      replicationClusterId, baseNamespaceDir, hfileArchiveDir);
     resultScanner = table1.getScanner(new Scan());
     totalRows = 0;
     while (resultScanner.next() != null) {
@@ -242,12 +243,12 @@ public void testLargeEditsPutDelete() throws Exception {
   @Test
   public void testMixedPutTables() throws Exception {
     List<WALEntry> entries = new ArrayList<>(BATCH_SIZE / 2);
-    List<Cell> cells = new ArrayList<>();
+    List<ExtendedCell> cells = new ArrayList<>();
     for (int i = 0; i < BATCH_SIZE; i++) {
       entries.add(createEntry(i % 2 == 0 ? TABLE_NAME2 : TABLE_NAME1, i, KeyValue.Type.Put, cells));
     }
-    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
+    SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells.iterator()),
       replicationClusterId, baseNamespaceDir, hfileArchiveDir);
     Scan scan = new Scan();
     ResultScanner scanRes = table2.getScanner(scan);
@@ -266,11 +267,11 @@ public void testMixedPutTables() throws Exception {
   @Test
   public void testMixedDeletes() throws Exception {
     List<WALEntry> entries = new ArrayList<>(3);
-    List<Cell> cells = new ArrayList<>();
+    List<ExtendedCell> cells = new ArrayList<>();
     for (int i = 0; i < 3; i++) {
       entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
     }
-    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
+    SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells.iterator()),
       replicationClusterId, baseNamespaceDir, hfileArchiveDir);
     entries = new ArrayList<>(3);
     cells = new ArrayList<>();
@@ -278,7 +279,7 @@ public void testMixedDeletes() throws Exception {
     entries.add(createEntry(TABLE_NAME1, 1, KeyValue.Type.DeleteFamily, cells));
     entries.add(createEntry(TABLE_NAME1, 2, KeyValue.Type.DeleteColumn, cells));

-    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
+    SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells.iterator()),
       replicationClusterId, baseNamespaceDir, hfileArchiveDir);

     Scan scan = new Scan();
@@ -293,7 +294,7 @@ public void testApplyDeleteBeforePut() throws Exception {
     List<WALEntry> entries = new ArrayList<>(5);
-    List<Cell> cells = new ArrayList<>();
+    List<ExtendedCell> cells = new ArrayList<>();
     for (int i = 0; i < 2; i++) {
       entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
     }
@@ -301,7 +302,7 @@ public void testApplyDeleteBeforePut() throws Exception {
     for (int i = 3; i < 5; i++) {
       entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
     }
-    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
+    SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells.iterator()),
       replicationClusterId, baseNamespaceDir, hfileArchiveDir);
     Get get = new Get(Bytes.toBytes(1));
     Result res = table1.get(get);
@@ -312,12 +313,12 @@ public void testRethrowRetriesExhaustedException() throws Exception {
     TableName notExistTable = TableName.valueOf("notExistTable");
     List<WALEntry> entries = new ArrayList<>();
-    List<Cell> cells = new ArrayList<>();
+    List<ExtendedCell> cells = new ArrayList<>();
     for (int i = 0; i < 10; i++) {
       entries.add(createEntry(notExistTable, i, KeyValue.Type.Put, cells));
     }
     try {
-      SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
+      SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells.iterator()),
         replicationClusterId, baseNamespaceDir, hfileArchiveDir);
       Assert.fail("Should re-throw TableNotFoundException.");
     } catch (TableNotFoundException e) {
@@ -331,8 +332,9 @@ public void testRethrowRetriesExhaustedException() throws Exception {
     try (Admin admin = conn.getAdmin()) {
       admin.disableTable(TABLE_NAME1);
       try {
-        SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
-          replicationClusterId, baseNamespaceDir, hfileArchiveDir);
+        SINK.replicateEntries(entries,
+          PrivateCellUtil.createExtendedCellScanner(cells.iterator()), replicationClusterId,
+          baseNamespaceDir, hfileArchiveDir);
         Assert.fail("Should re-throw RetriesExhaustedWithDetailsException.");
       } catch (RetriesExhaustedException e) {
       } finally {
@@ -412,7 +414,9 @@ public void testReplicateEntriesForHFiles() throws Exception {
       assertEquals(0, scanner.next(numRows).length);
     }
     // 7. Replicate the bulk loaded entry
-    SINK.replicateEntries(entries, CellUtil.createCellScanner(edit.getCells().iterator()),
+    SINK.replicateEntries(entries,
+      PrivateCellUtil
+        .createExtendedCellScanner(WALEditInternalHelper.getExtendedCells(edit).iterator()),
       replicationClusterId, baseNamespaceDir, hfileArchiveDir);
     try (ResultScanner scanner = table1.getScanner(new Scan())) {
       // 8. Assert data is replicated
@@ -429,13 +433,13 @@ public void testFailedReplicationSinkMetrics() throws IOException {
     long initialFailedBatches = SINK.getSinkMetrics().getFailedBatches();
     long errorCount = 0L;
     List<WALEntry> entries = new ArrayList<>(BATCH_SIZE);
-    List<Cell> cells = new ArrayList<>();
+    List<ExtendedCell> cells = new ArrayList<>();
     for (int i = 0; i < BATCH_SIZE; i++) {
       entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
     }
     cells.clear(); // cause IndexOutOfBoundsException
     try {
-      SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
+      SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells.iterator()),
         replicationClusterId, baseNamespaceDir, hfileArchiveDir);
       Assert.fail("Should re-throw ArrayIndexOutOfBoundsException.");
     } catch (ArrayIndexOutOfBoundsException e) {
@@ -450,7 +454,7 @@ public void testFailedReplicationSinkMetrics() throws IOException {
       entries.add(createEntry(notExistTable, i, KeyValue.Type.Put, cells));
     }
     try {
-      SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
+      SINK.replicateEntries(entries, PrivateCellUtil.createExtendedCellScanner(cells.iterator()),
         replicationClusterId, baseNamespaceDir, hfileArchiveDir);
       Assert.fail("Should re-throw TableNotFoundException.");
     } catch (TableNotFoundException e) {
@@ -468,8 +472,9 @@ public void testFailedReplicationSinkMetrics() throws IOException {
     try (Admin admin = conn.getAdmin()) {
       admin.disableTable(TABLE_NAME1);
       try {
-        SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()),
-          replicationClusterId, baseNamespaceDir, hfileArchiveDir);
+        SINK.replicateEntries(entries,
+          PrivateCellUtil.createExtendedCellScanner(cells.iterator()), replicationClusterId,
+          baseNamespaceDir, hfileArchiveDir);
         Assert.fail("Should re-throw IOException.");
       } catch (IOException e) {
         errorCount++;
@@ -481,7 +486,8 @@ public void testFailedReplicationSinkMetrics() throws IOException {
     }
   }

-  private WALEntry createEntry(TableName table, int row, KeyValue.Type type, List<Cell> cells) {
+  private WALEntry createEntry(TableName table, int row, KeyValue.Type type,
+    List<ExtendedCell> cells) {
     byte[] fam = table.equals(TABLE_NAME1) ? FAM_NAME1 : FAM_NAME2;
     byte[] rowBytes = Bytes.toBytes(row);
     // Just make sure we don't get the same ts for two consecutive rows with
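
Every replicateEntries call in this test now wraps its List<ExtendedCell> with PrivateCellUtil.createExtendedCellScanner, and the bulk-load case pulls its cells out of the WALEdit through WALEditInternalHelper rather than WALEdit#getCells(). Both call shapes in one sketch (sink fields and the WALEdit are stand-ins for the test's own; the return type of getExtendedCells is assumed to be a List<ExtendedCell>, as its use with iterator() above suggests):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.ExtendedCell;
    import org.apache.hadoop.hbase.ExtendedCellScanner;
    import org.apache.hadoop.hbase.PrivateCellUtil;
    import org.apache.hadoop.hbase.wal.WALEdit;
    import org.apache.hadoop.hbase.wal.WALEditInternalHelper;

    // Wrap an explicit cell list, as the put/delete tests do.
    List<ExtendedCell> cells = new ArrayList<>();
    ExtendedCellScanner listScanner = PrivateCellUtil.createExtendedCellScanner(cells.iterator());

    // Or pull the cells straight out of a WALEdit, as the HFile test does.
    WALEdit edit = new WALEdit();
    ExtendedCellScanner editScanner = PrivateCellUtil
      .createExtendedCellScanner(WALEditInternalHelper.getExtendedCells(edit).iterator());
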
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java
index d6c7a0250015..11a0e98c5541 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java
@@ -28,10 +28,11 @@
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellBuilder;
-import org.apache.hadoop.hbase.CellBuilderFactory;
 import org.apache.hadoop.hbase.CellBuilderType;
-import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.ExtendedCell;
+import org.apache.hadoop.hbase.ExtendedCellBuilder;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
+import org.apache.hadoop.hbase.ExtendedCellScanner;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
@@ -101,7 +102,7 @@ public void testWALEntryFilter() throws IOException {
       ByteString.copyFromUtf8(TableName.valueOf(this.name.getMethodName()).toString());
     // Add WALEdit Cells to Cells List. The way edits arrive at the sink is with protos
     // describing the edit with all Cells from all edits aggregated in a single CellScanner.
-    final List<Cell> cells = new ArrayList<>();
+    final List<ExtendedCell> cells = new ArrayList<>();
     int count = BOUNDARY * 2;
     for (int i = 0; i < count; i++) {
       byte[] bytes = Bytes.toBytes(i);
@@ -114,20 +115,21 @@ public void testWALEntryFilter() throws IOException {
       entryBuilder.setAssociatedCellCount(1);
       entries.add(entryBuilder.build());
       // We need to add a Cell per WALEdit to the cells array.
-      CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.DEEP_COPY);
+      ExtendedCellBuilder cellBuilder =
+        ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY);
       // Make cells whose row, family, cell, value, and ts are == 'i'.
-      Cell cell = cellBuilder.setRow(bytes).setFamily(bytes).setQualifier(bytes)
+      ExtendedCell cell = cellBuilder.setRow(bytes).setFamily(bytes).setQualifier(bytes)
         .setType(Cell.Type.Put).setTimestamp(i).setValue(bytes).build();
       cells.add(cell);
     }
     // Now wrap our cells array in a CellScanner that we can pass in to replicateEntries. It has
     // all Cells from all the WALEntries made above.
-    CellScanner cellScanner = new CellScanner() {
+    ExtendedCellScanner cellScanner = new ExtendedCellScanner() {
       // Set to -1 because advance gets called before current.
       int index = -1;

       @Override
-      public Cell current() {
+      public ExtendedCell current() {
         return cells.get(index);
       }
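
The anonymous class above implements ExtendedCellScanner by hand. The same shape extracted into a helper, as a sketch (ExtendedCellScanner inherits advance() from CellScanner, which together with current() is all replicateEntries needs here):

    import java.util.List;
    import org.apache.hadoop.hbase.ExtendedCell;
    import org.apache.hadoop.hbase.ExtendedCellScanner;

    static ExtendedCellScanner scannerOver(final List<ExtendedCell> cells) {
      return new ExtendedCellScanner() {
        // Start at -1 because advance() is called before current().
        private int index = -1;

        @Override
        public ExtendedCell current() {
          return cells.get(index);
        }

        @Override
        public boolean advance() {
          index++;
          return index < cells.size();
        }
      };
    }
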
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java
index dfe39990f4c8..42fb28ccaa0d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java
@@ -24,7 +24,8 @@
 import java.util.concurrent.ThreadLocalRandom;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.ExtendedCell;
+import org.apache.hadoop.hbase.ExtendedCellScanner;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.Tag;
@@ -74,8 +75,8 @@ public Mutation beforeMutate(long rowkeyBase, Mutation m) throws IOException {
         numTags = minNumTags + rand.nextInt(maxNumTags - minNumTags);
       }
       List<Tag> tags;
-      for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
-        Cell cell = cellScanner.current();
+      for (ExtendedCellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
+        ExtendedCell cell = cellScanner.current();
         byte[] tag = LoadTestDataGenerator.generateData(rand,
           minTagLength + rand.nextInt(maxTagLength - minTagLength));
         tags = new ArrayList<>();
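
Mutation#cellScanner() is likewise typed as ExtendedCellScanner on this branch, which is what lets the generator above touch tags without casting. A minimal sketch of that iteration (the cellsWithTags helper is illustrative; the Mutation is assumed populated):

    import java.io.IOException;
    import org.apache.hadoop.hbase.ExtendedCell;
    import org.apache.hadoop.hbase.ExtendedCellScanner;
    import org.apache.hadoop.hbase.client.Mutation;

    // Count the cells of a Mutation that carry at least one tag.
    static int cellsWithTags(Mutation m) throws IOException {
      int n = 0;
      for (ExtendedCellScanner scanner = m.cellScanner(); scanner.advance();) {
        ExtendedCell cell = scanner.current();
        if (cell.getTagsLength() > 0) {
          n++;
        }
      }
      return n;
    }
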
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.java
index 10ce20049c9e..3dca289cb451 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplitToHFile.java
@@ -39,12 +39,14 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ClientInternalHelper;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Get;
@@ -341,10 +343,10 @@ public void testRecoverSequenceId() throws Exception {
         region.put(new Put(Bytes.toBytes(i)).addColumn(cfd.getName(), QUALIFIER, VALUE1));
         Result result = region.get(new Get(Bytes.toBytes(i)).addFamily(cfd.getName()));
         assertTrue(Bytes.equals(VALUE1, result.getValue(cfd.getName(), QUALIFIER)));
-        List<Cell> cells = result.listCells();
-        assertEquals(1, cells.size());
+        ExtendedCell[] cells = ClientInternalHelper.getExtendedRawCells(result);
+        assertEquals(1, cells.length);
         seqIdMap.computeIfAbsent(i, r -> new HashMap<>()).put(cfd.getNameAsString(),
-          cells.get(0).getSequenceId());
+          cells[0].getSequenceId());
       }
     }
@@ -362,10 +364,9 @@ public void testRecoverSequenceId() throws Exception {
       for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
         Result result = region2.get(new Get(Bytes.toBytes(i)).addFamily(cfd.getName()));
         assertTrue(Bytes.equals(VALUE1, result.getValue(cfd.getName(), QUALIFIER)));
-        List<Cell> cells = result.listCells();
-        assertEquals(1, cells.size());
-        assertEquals((long) seqIdMap.get(i).get(cfd.getNameAsString()),
-          cells.get(0).getSequenceId());
+        ExtendedCell[] cells = ClientInternalHelper.getExtendedRawCells(result);
+        assertEquals(1, cells.length);
+        assertEquals((long) seqIdMap.get(i).get(cfd.getNameAsString()), cells[0].getSequenceId());
       }
     }
   }
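
The recover-sequence-id assertions read the id straight off ClientInternalHelper's ExtendedCell array, since Cell#getSequenceId() is among the removed methods. The access pattern in isolation, as a sketch (the firstSequenceId helper is illustrative and assumes a non-empty Result):

    import org.apache.hadoop.hbase.ExtendedCell;
    import org.apache.hadoop.hbase.client.ClientInternalHelper;
    import org.apache.hadoop.hbase.client.Result;

    // Internal-only view of a Result's cells; exposes getSequenceId().
    static long firstSequenceId(Result result) {
      ExtendedCell[] cells = ClientInternalHelper.getExtendedRawCells(result);
      return cells[0].getSequenceId();
    }
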