HBASE-23238 Additional test and checks for null references on ScannerCallableWithReplicas (#780)

Signed-off-by: Sean Busbey <busbey@apache.org>
(cherry picked from commit 577db5d)
wchevreuil committed Nov 1, 2019
1 parent 5521057 commit 18f1aac
Showing 2 changed files with 87 additions and 4 deletions.
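
Both fixes in the first file follow the same defensive pattern: check whether currentScannerCallable is still set before using it, and log a warning instead of letting a NullPointerException escape. A rough standalone sketch of that guard-and-log shape, assuming nothing beyond the JDK, is below; the NullGuardSketch and Delegate names are invented for illustration and are not part of the patch, which operates on a wrapped ScannerCallable and the class's own LOG.

import java.util.logging.Logger;

// Standalone illustration of the defensive pattern the patch applies in
// ScannerCallableWithReplicas: guard a possibly-null delegate, warn instead of
// throwing NullPointerException, and return a harmless value. The Delegate
// interface and class name are invented for this sketch; the real code logs
// through the class's own LOG rather than java.util.logging.
public class NullGuardSketch {

  private static final Logger LOG = Logger.getLogger(NullGuardSketch.class.getName());

  /** Stand-in for the wrapped ScannerCallable. */
  interface Delegate {
    void close();
    String call();
  }

  private Delegate delegate;

  NullGuardSketch(Delegate delegate) {
    this.delegate = delegate;
  }

  /** Mirrors setClose(): dereference only when the delegate is still present. */
  public void close() {
    if (delegate != null) {
      delegate.close();
    } else {
      LOG.warning("close() called but the delegate is already null; nothing to do.");
    }
  }

  /** Mirrors the call() guard: log and return null rather than dereference null. */
  public String call() {
    if (delegate == null) {
      LOG.warning("call() received but the delegate is already null; returning null.");
      return null;
    }
    String result = delegate.call();
    delegate = null; // one-shot use, as in the patched path that nulls the callable after a call
    return result;
  }
}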
@@ -94,7 +94,12 @@ public ScannerCallableWithReplicas(TableName tableName, ClusterConnection cConne
   }
 
   public void setClose() {
-    currentScannerCallable.setClose();
+    if(currentScannerCallable != null) {
+      currentScannerCallable.setClose();
+    } else {
+      LOG.warn("Calling close on ScannerCallable reference that is already null, "
+        + "which shouldn't happen.");
+    }
   }
 
   public void setRenew(boolean val) {
@@ -136,6 +141,10 @@ public MoreResults moreResultsForScan() {
       Result[] r = currentScannerCallable.call(timeout);
       currentScannerCallable = null;
       return r;
+    } else if(currentScannerCallable == null) {
+      LOG.warn("Another call received, but our ScannerCallable is already null. "
+        + "This shouldn't happen, but there's not much to do, so logging and returning null.");
+      return null;
     }
     // We need to do the following:
     //1. When a scan goes out to a certain replica (default or not), we need to
@@ -46,6 +46,9 @@
 import org.apache.hadoop.hbase.HTestConst;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.filter.FilterBase;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
@@ -54,7 +57,6 @@
 import org.apache.hadoop.hbase.filter.QualifierFilter;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.After;
@@ -945,7 +947,7 @@ public void testReverseScanWithFlush() throws Exception {
     final byte[] LARGE_VALUE = generateHugeValue(128 * 1024);
 
     try (Table table = TEST_UTIL.createTable(tableName, FAMILY);
-         Admin admin = TEST_UTIL.getAdmin()) {
+        Admin admin = TEST_UTIL.getAdmin()) {
       List<Put> putList = new ArrayList<>();
       for (long i = 0; i < ROWS_TO_INSERT; i++) {
         Put put = new Put(Bytes.toBytes(i));
@@ -975,7 +977,79 @@ public void testReverseScanWithFlush() throws Exception {
         }
       }
       assertEquals("Expected " + ROWS_TO_INSERT + " rows in the table but it is " + count,
-          ROWS_TO_INSERT, count);
+        ROWS_TO_INSERT, count);
     }
   }
 
+  @Test
+  public void testScannerWithPartialResults() throws Exception {
+    TableName tableName = TableName.valueOf("testScannerWithPartialResults");
+    try (Table table = TEST_UTIL.createMultiRegionTable(tableName,
+        Bytes.toBytes("c"), 4)) {
+      List<Put> puts = new ArrayList<>();
+      byte[] largeArray = new byte[10000];
+      Put put = new Put(Bytes.toBytes("aaaa0"));
+      put.addColumn(Bytes.toBytes("c"), Bytes.toBytes("1"), Bytes.toBytes("1"));
+      put.addColumn(Bytes.toBytes("c"), Bytes.toBytes("2"), Bytes.toBytes("2"));
+      put.addColumn(Bytes.toBytes("c"), Bytes.toBytes("3"), Bytes.toBytes("3"));
+      put.addColumn(Bytes.toBytes("c"), Bytes.toBytes("4"), Bytes.toBytes("4"));
+      puts.add(put);
+      put = new Put(Bytes.toBytes("aaaa1"));
+      put.addColumn(Bytes.toBytes("c"), Bytes.toBytes("1"), Bytes.toBytes("1"));
+      put.addColumn(Bytes.toBytes("c"), Bytes.toBytes("2"), largeArray);
+      put.addColumn(Bytes.toBytes("c"), Bytes.toBytes("3"), largeArray);
+      puts.add(put);
+      table.put(puts);
+      Scan scan = new Scan();
+      scan.addFamily(Bytes.toBytes("c"));
+      scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, tableName.getName());
+      scan.setMaxResultSize(10001);
+      scan.setStopRow(Bytes.toBytes("bbbb"));
+      scan.setFilter(new LimitKVsReturnFilter());
+      ResultScanner rs = table.getScanner(scan);
+      Result result;
+      int expectedKvNumber = 6;
+      int returnedKvNumber = 0;
+      while((result = rs.next()) != null){
+        returnedKvNumber += result.listCells().size();
+      }
+      rs.close();
+      assertEquals(expectedKvNumber, returnedKvNumber);
+    }
+  }
+
+  public static class LimitKVsReturnFilter extends FilterBase {
+
+    private static int total = 0;
+
+    @Override
+    public ReturnCode filterCell(Cell v) throws IOException {
+      if(total>=6) {
+        total++;
+        return ReturnCode.SKIP;
+      }
+      total++;
+      return ReturnCode.INCLUDE;
+    }
+
+    @Override
+    public boolean filterAllRemaining() throws IOException {
+      if(total<7) {
+        return false;
+      }
+      total++;
+      return true;
+    }
+
+    @Override
+    public String toString() {
+      return this.getClass().getSimpleName();
+    }
+
+    public static LimitKVsReturnFilter parseFrom(final byte [] pbBytes)
+        throws DeserializationException {
+      return new LimitKVsReturnFilter();
+    }
+  }
+
 }
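
A note on what the new test exercises: the second row carries 10000-byte values while the scan's max result size is 10001, so the server hands the row back in partial chunks, and LimitKVsReturnFilter eventually reports filterAllRemaining() mid-row; that is the kind of scan where the callable reference can go null and the new guards matter. For readers unfamiliar with partial results, here is a minimal standalone sketch of consuming them through the plain client API; the table name ("example_table"), column family, and size limit are illustrative assumptions, not values from the patch.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

// Standalone sketch (not part of this commit): scan a table whose rows are
// larger than the per-RPC result size, so results arrive in partial batches.
public class PartialScanExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf("example_table"))) {
      Scan scan = new Scan();
      scan.addFamily(Bytes.toBytes("c"));
      // Keep per-RPC payloads small so wide rows are split into partial results.
      scan.setMaxResultSize(10_000);
      scan.setAllowPartialResults(true);
      int cellCount = 0;
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          List<Cell> cells = result.listCells();
          if (cells != null) {
            cellCount += cells.size();
          }
        }
      }
      System.out.println("Cells seen: " + cellCount);
    }
  }
}

Without setAllowPartialResults(true), the client-side scanner stitches partial Results back into whole rows before handing them to the caller, which is the behavior the new test relies on when it simply sums listCells() sizes.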
