Skip to content

Commit

Permalink
fix test
Browse files Browse the repository at this point in the history
  • Loading branch information
Apache9 committed Jun 11, 2024
1 parent 3290962 commit ca0a645
Show file tree
Hide file tree
Showing 14 changed files with 44 additions and 53 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -1649,9 +1649,9 @@ public boolean prefetchStarted() {

/**
* Create a Scanner on this file. No seeks or reads are done on creation. Call
* {@link HFileScanner#seekTo(Cell)} to position an start the read. There is nothing to clean up
* in a Scanner. Letting go of your references to the scanner is sufficient. NOTE: Do not use this
* overload of getScanner for compactions. See
 * {@link HFileScanner#seekTo(ExtendedCell)} to position and start the read. There is nothing to
* clean up in a Scanner. Letting go of your references to the scanner is sufficient. NOTE: Do not
* use this overload of getScanner for compactions. See
* {@link #getScanner(Configuration, boolean, boolean, boolean)}
* @param conf Store configuration.
* @param cacheBlocks True if we should cache blocks read in by this scanner.
Expand All @@ -1666,8 +1666,8 @@ public HFileScanner getScanner(Configuration conf, boolean cacheBlocks, final bo

/**
* Create a Scanner on this file. No seeks or reads are done on creation. Call
* {@link HFileScanner#seekTo(Cell)} to position an start the read. There is nothing to clean up
* in a Scanner. Letting go of your references to the scanner is sufficient.
 * {@link HFileScanner#seekTo(ExtendedCell)} to position and start the read. There is nothing to
* clean up in a Scanner. Letting go of your references to the scanner is sufficient.
* @param conf Store configuration.
* @param cacheBlocks True if we should cache blocks read in by this scanner.
* @param pread Use positional read rather than seek+read if true (pread is better for
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.function.IntConsumer;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.regionserver.Shipper;
import org.apache.yetus.audience.InterfaceAudience;
Expand All @@ -31,7 +30,7 @@
* reposition yourself as well.
* <p>
* A scanner doesn't always have a key/value that it is pointing to when it is first created and
* before {@link #seekTo()}/{@link #seekTo(Cell)} are called. In this case,
* before {@link #seekTo()}/{@link #seekTo(ExtendedCell)} are called. In this case,
* {@link #getKey()}/{@link #getValue()} returns null. At most other times, a key and value will be
* available. The general pattern is that you position the Scanner using the seekTo variants and
* then getKey and getValue.
Expand Down Expand Up @@ -90,29 +89,30 @@ public interface HFileScanner extends Shipper, Closeable {
boolean next() throws IOException;

/**
* Gets the current key in the form of a cell. You must call {@link #seekTo(Cell)} before this
* method.
* Gets the current key in the form of a cell. You must call {@link #seekTo(ExtendedCell)} before
* this method.
* @return gets the current key as a Cell.
*/
ExtendedCell getKey();

/**
* Gets a buffer view to the current value. You must call {@link #seekTo(Cell)} before this
* method.
* Gets a buffer view to the current value. You must call {@link #seekTo(ExtendedCell)} before
* this method.
* @return byte buffer for the value. The limit is set to the value size, and the position is 0,
* the start of the buffer view.
*/
ByteBuffer getValue();

/** Returns Instance of {@link org.apache.hadoop.hbase.Cell}. */
/** Returns Instance of {@link ExtendedCell}. */
ExtendedCell getCell();

/** Returns Reader that underlies this Scanner instance. */
HFile.Reader getReader();

/**
* @return True is scanner has had one of the seek calls invoked; i.e. {@link #seekBefore(Cell)}
* or {@link #seekTo()} or {@link #seekTo(Cell)}. Otherwise returns false.
 * @return True if scanner has had one of the seek calls invoked; i.e.
* {@link #seekBefore(ExtendedCell)} or {@link #seekTo()} or
* {@link #seekTo(ExtendedCell)}. Otherwise returns false.
*/
boolean isSeeked();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,15 +20,14 @@
import com.google.errorprone.annotations.RestrictedApi;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.nio.RefCnt;
import org.apache.yetus.audience.InterfaceAudience;

/**
* A MemStoreLAB implementation which wraps N MemStoreLABs. Its main duty is in proper managing the
* close of the individual MemStoreLAB. This is treated as an immutable one and so do not allow to
* add any more Cells into it. {@link #copyCellInto(Cell)} throws Exception
 * add any more Cells into it. {@link #copyCellInto(ExtendedCell)} throws Exception
*/
@InterfaceAudience.Private
public class ImmutableMemStoreLAB implements MemStoreLAB {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -236,8 +236,9 @@ public void close() {
* As individual scanners may run past their ends, those scanners are automatically closed and
* removed from the heap.
* <p>
* This function (and {@link #reseek(Cell)}) does not do multi-column Bloom filter and lazy-seek
* optimizations. To enable those, call {@link #requestSeek(Cell, boolean, boolean)}.
* This function (and {@link #reseek(ExtendedCell)}) does not do multi-column Bloom filter and
* lazy-seek optimizations. To enable those, call
* {@link #requestSeek(ExtendedCell, boolean, boolean)}.
* @param seekKey KeyValue to seek at or after
* @return true if KeyValues exist at or after specified key, false if not
*/
Expand All @@ -249,7 +250,7 @@ public boolean seek(ExtendedCell seekKey) throws IOException {
}

/**
* This function is identical to the {@link #seek(Cell)} function except that
* This function is identical to the {@link #seek(ExtendedCell)} function except that
* scanner.seek(seekKey) is changed to scanner.reseek(seekKey).
*/
@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.regionserver;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
Expand All @@ -39,8 +38,8 @@
* collection occurs.
* <p>
* This manages the large sized chunks. When Cells are to be added to Memstore, MemStoreLAB's
* {@link #copyCellInto(Cell)} gets called. This allocates enough size in the chunk to hold this
* cell's data and copies into this area and then recreate a Cell over this copied data.
* {@link #copyCellInto(ExtendedCell)} gets called. This allocates enough size in the chunk to hold
 * this cell's data, copies it into this area, and then recreates a Cell over this copied data.
* <p>
* @see ChunkCreator
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import org.apache.hadoop.hbase.ByteBufferExtendedCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.nio.RefCnt;
import org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType;
import org.apache.yetus.audience.InterfaceAudience;
Expand Down Expand Up @@ -219,14 +218,7 @@ private ExtendedCell copyCellInto(ExtendedCell cell, int maxAlloc) {
private static ExtendedCell copyToChunkCell(ExtendedCell cell, ByteBuffer buf, int offset,
int len) {
int tagsLen = cell.getTagsLength();
if (cell instanceof ExtendedCell) {
((ExtendedCell) cell).write(buf, offset);
} else {
// Normally all Cell impls within Server will be of type ExtendedCell. Just considering the
// other case also. The data fragments within Cell is copied into buf as in KeyValue
// serialization format only.
KeyValueUtil.appendTo(cell, buf, offset, true);
}
cell.write(buf, offset);
return createChunkCell(buf, offset, len, tagsLen, cell.getSequenceId());
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -928,7 +928,7 @@ protected boolean trySkipToNextRow(ExtendedCell cell) throws IOException {
}

/**
* See {@link org.apache.hadoop.hbase.regionserver.StoreScanner#trySkipToNextRow(Cell)}
* See {@link #trySkipToNextRow(ExtendedCell)}
* @param cell current cell
* @return true means skip to next column, false means not
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.regionserver.querymatcher;

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.regionserver.ShipperListener;
import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode;
Expand Down Expand Up @@ -54,9 +53,10 @@ public interface ColumnTracker extends ShipperListener {
/**
* Checks if the column is present in the list of requested columns by returning the match code
* instance. It does not check against the number of versions for the columns asked for. To do the
* version check, one has to call {@link #checkVersions(Cell, long, byte, boolean)} method based
* on the return type (INCLUDE) of this method. The values that can be returned by this method are
* {@link MatchCode#INCLUDE}, {@link MatchCode#SEEK_NEXT_COL} and {@link MatchCode#SEEK_NEXT_ROW}.
* version check, one has to call {@link #checkVersions(ExtendedCell, long, byte, boolean)} method
* based on the return type (INCLUDE) of this method. The values that can be returned by this
* method are {@link MatchCode#INCLUDE}, {@link MatchCode#SEEK_NEXT_COL} and
* {@link MatchCode#SEEK_NEXT_ROW}.
* @param cell a cell with the column to match against
* @param type The type of the Cell
* @return The match code instance.
Expand All @@ -68,11 +68,11 @@ public interface ColumnTracker extends ShipperListener {
/**
* Keeps track of the number of versions for the columns asked for. It assumes that the user has
* already checked if the cell needs to be included by calling the
* {@link #checkColumn(Cell, byte)} method. The enum values returned by this method are
* {@link #checkColumn(ExtendedCell, byte)} method. The enum values returned by this method are
* {@link MatchCode#SKIP}, {@link MatchCode#INCLUDE}, {@link MatchCode#INCLUDE_AND_SEEK_NEXT_COL}
* and {@link MatchCode#INCLUDE_AND_SEEK_NEXT_ROW}. Implementations which include all the columns
* could just return {@link MatchCode#INCLUDE} in the {@link #checkColumn(Cell, byte)} method and
* perform all the operations in this checkVersions method.
* could just return {@link MatchCode#INCLUDE} in the {@link #checkColumn(ExtendedCell, byte)}
* method and perform all the operations in this checkVersions method.
* @param cell a cell with the column to match against
* @param timestamp The timestamp of the cell.
* @param type the type of the key value (Put/Delete)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -341,23 +341,23 @@ public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
}
}
if (visibilityTags != null) {
List<Cell> updatedCells = new ArrayList<>();
List<ExtendedCell> updatedCells = new ArrayList<>();
for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
Cell ce = cellScanner.current();
assert ce instanceof ExtendedCell;
ExtendedCell cell = (ExtendedCell) c;
ExtendedCell cell = (ExtendedCell) ce;
List<Tag> tags = PrivateCellUtil.getTags(cell);
if (modifiedTagFound) {
// Rewrite the tags by removing the modified tags.
removeReplicationVisibilityTag(tags);
}
tags.addAll(visibilityTags);
Cell updatedCell = PrivateCellUtil.createCell(cell, tags);
ExtendedCell updatedCell = PrivateCellUtil.createCell(cell, tags);
updatedCells.add(updatedCell);
}
m.getFamilyCellMap().clear();
// Clear and add new Cells to the Mutation.
for (Cell cell : updatedCells) {
for (ExtendedCell cell : updatedCells) {
if (m instanceof Put) {
Put p = (Put) m;
p.add(cell);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ public void writeBloom(ExtendedCell cell) throws IOException {
}
}

private void sanityCheck(Cell cell) throws IOException {
private void sanityCheck(ExtendedCell cell) throws IOException {
if (this.getLastCell() != null) {
if (comparator.compare(cell, this.getLastCell()) <= 0) {
throw new IOException("Added a key not lexically larger than" + " previous. Current cell = "
Expand All @@ -72,5 +72,5 @@ private void sanityCheck(Cell cell) throws IOException {
* @param cell the cell to be verified
* @return true if a new key else false
*/
protected abstract boolean isNewKey(Cell cell);
protected abstract boolean isNewKey(ExtendedCell cell);
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,9 @@
import static org.apache.hadoop.hbase.regionserver.HStoreFile.LAST_BLOOM_KEY;

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
import org.apache.yetus.audience.InterfaceAudience;

Expand All @@ -45,7 +45,7 @@ public void addLastBloomKey(Writer writer) throws IOException {
}

@Override
protected boolean isNewKey(Cell cell) {
protected boolean isNewKey(ExtendedCell cell) {
if (this.getLastCell() != null) {
return !CellUtil.matchingRows(cell, this.getLastCell());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
import static org.apache.hadoop.hbase.regionserver.HStoreFile.LAST_BLOOM_KEY;

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCell;
Expand Down Expand Up @@ -50,7 +49,7 @@ public void addLastBloomKey(Writer writer) throws IOException {
}

@Override
protected boolean isNewKey(Cell cell) {
protected boolean isNewKey(ExtendedCell cell) {
if (this.getLastCell() != null) {
return !CellUtil.matchingRowColumn(cell, this.getLastCell());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,15 +40,16 @@ public RowPrefixFixedLengthBloomContext(BloomFilterWriter bloomFilterWriter,
this.prefixLength = prefixLength;
}

public void writeBloom(Cell cell) throws IOException {
@Override
public void writeBloom(ExtendedCell cell) throws IOException {
super.writeBloom(getRowPrefixCell(cell));
}

/**
* @param cell the cell
* @return the new cell created by row prefix
*/
private ExtendedCell getRowPrefixCell(Cell cell) {
private ExtendedCell getRowPrefixCell(ExtendedCell cell) {
byte[] row = CellUtil.copyRow(cell);
return ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
.setRow(row, 0, Math.min(prefixLength, row.length)).setType(Cell.Type.Put).build();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,9 +65,9 @@ public static Object[] data() {

private static final int NUM_OF_CELLS = 4;
private static final int SMALL_CHUNK_SIZE = 64;
private ExtendedCell ascCells[];
private ExtendedCell[] ascCells;
private CellArrayMap<ExtendedCell> ascCbOnHeap;
private ExtendedCell descCells[];
private ExtendedCell[] descCells;
private CellArrayMap<ExtendedCell> descCbOnHeap;
private final static Configuration CONF = new Configuration();
private KeyValue lowerOuterCell;
Expand Down

0 comments on commit ca0a645

Please sign in to comment.