Skip to content

Commit

Permalink
HBASE-26724 Backport the UT changes in HBASE-24510 to branch-2.x (#4081)
Browse files Browse the repository at this point in the history
Signed-off-by: Xin Sun <ddupgs@gmail.com>
  • Loading branch information
Apache9 committed Feb 9, 2022
1 parent 4e15101 commit 12002e6
Show file tree
Hide file tree
Showing 11 changed files with 525 additions and 442 deletions.
135 changes: 125 additions & 10 deletions hbase-server/src/test/java/org/apache/hadoop/hbase/HTestConst.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,16 +16,21 @@
*/
package org.apache.hadoop.hbase;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.Collections;

import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionAsTable;
import org.apache.hadoop.hbase.util.Bytes;

/**
* Similar to {@link HConstants} but for tests. Also provides some simple
* static utility functions to generate test data.
* Similar to {@link HConstants} but for tests. Also provides some simple static utility functions
* to generate test data.
*/
public class HTestConst {

Expand All @@ -34,15 +39,13 @@ private HTestConst() {

public static final String DEFAULT_TABLE_STR = "MyTestTable";
public static final byte[] DEFAULT_TABLE_BYTES = Bytes.toBytes(DEFAULT_TABLE_STR);
public static final TableName DEFAULT_TABLE =
TableName.valueOf(DEFAULT_TABLE_BYTES);
public static final TableName DEFAULT_TABLE = TableName.valueOf(DEFAULT_TABLE_BYTES);

public static final String DEFAULT_CF_STR = "MyDefaultCF";
public static final byte[] DEFAULT_CF_BYTES = Bytes.toBytes(DEFAULT_CF_STR);

public static final Set<String> DEFAULT_CF_STR_SET =
Collections.unmodifiableSet(new HashSet<>(
Arrays.asList(new String[] { DEFAULT_CF_STR })));
Collections.unmodifiableSet(new HashSet<>(Arrays.asList(new String[] { DEFAULT_CF_STR })));

public static final String DEFAULT_ROW_STR = "MyTestRow";
public static final byte[] DEFAULT_ROW_BYTES = Bytes.toBytes(DEFAULT_ROW_STR);
Expand All @@ -53,17 +56,129 @@ private HTestConst() {
public static String DEFAULT_VALUE_STR = "MyTestValue";
public static byte[] DEFAULT_VALUE_BYTES = Bytes.toBytes(DEFAULT_VALUE_STR);

private static final char FIRST_CHAR = 'a';
private static final char LAST_CHAR = 'z';
private static final byte[] START_KEY_BYTES = { FIRST_CHAR, FIRST_CHAR, FIRST_CHAR };

/**
 * Generate <code>n</code> unique byte sequences by suffixing <code>base</code> with the ASCII
 * decimal representation of each index 0..n-1.
 */
public static byte[][] makeNAscii(byte[] base, int n) {
  byte[][] result = new byte[n][];
  for (int idx = 0; idx < n; idx++) {
    result[idx] = Bytes.add(base, Bytes.toBytes(Integer.toString(idx)));
  }
  return result;
}

/**
 * Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of the
 * form 'aaa', 'aab', etc where key and value are the same.
 * @return count of what we added.
 */
public static long addContent(final Region r, final byte[] columnFamily, final byte[] column)
  throws IOException {
  // Default to the canonical 'aaa' start key when the region has no explicit start key.
  byte[] start = r.getRegionInfo().getStartKey();
  if (start == null || start.length == 0) {
    start = START_KEY_BYTES;
  }
  return addContent(new RegionAsTable(r), Bytes.toString(columnFamily), Bytes.toString(column),
    start, r.getRegionInfo().getEndKey(), -1);
}

/**
 * Add content to region <code>r</code> under family <code>columnFamily</code> with a null
 * qualifier. Delegates to {@link #addContent(Region, byte[], byte[])}.
 * @return count of what we added.
 */
public static long addContent(final Region r, final byte[] columnFamily) throws IOException {
  return addContent(r, columnFamily, null);
}

/**
 * Add content to the table <code>updater</code> under family <code>columnFamily</code>. Adds data
 * of the form 'aaa', 'aab', etc where key and value are the same. Rows start at the default
 * START_KEY_BYTES with no end key and no explicit timestamp.
 * @return count of what we added.
 */
public static long addContent(Table updater, String columnFamily) throws IOException {
  return addContent(updater, columnFamily, START_KEY_BYTES, null);
}

/**
 * Same as {@link #addContent(Table, String)} but with an explicit column qualifier.
 * @return count of what we added.
 */
public static long addContent(Table updater, String family, String column) throws IOException {
  return addContent(updater, family, column, START_KEY_BYTES, null);
}

/**
 * Add content to the table <code>updater</code> under family <code>columnFamily</code>. Adds data
 * of the form 'aaa', 'aab', etc where key and value are the same, from
 * <code>startKeyBytes</code> up to (but excluding) <code>endKey</code>.
 * @return count of what we added.
 */
public static long addContent(Table updater, String columnFamily, byte[] startKeyBytes,
  byte[] endKey) throws IOException {
  return addContent(updater, columnFamily, null, startKeyBytes, endKey, -1);
}

/**
 * Same as {@link #addContent(Table, String, byte[], byte[])} but with an explicit column
 * qualifier.
 * @return count of what we added.
 */
public static long addContent(Table updater, String family, String column, byte[] startKeyBytes,
  byte[] endKey) throws IOException {
  return addContent(updater, family, column, startKeyBytes, endKey, -1);
}

/**
 * Add content to the table <code>updater</code> on the passed column <code>column</code>. Adds
 * data of the form 'aaa', 'aab', etc where key and value are the same.
 * @param updater       table (or a region wrapped via RegionAsTable) to write into
 * @param columnFamily  family name; a trailing ':' is appended if missing. Ignored when
 *                      <code>column</code> already contains a ':' (full family:qualifier spec).
 * @param column        qualifier, or a full 'family:qualifier' spec, or null for an empty
 *                      qualifier
 * @param startKeyBytes first row key to write. NOTE(review): must be at least 3 bytes long or
 *                      the indexing below throws ArrayIndexOutOfBoundsException — callers pass
 *                      START_KEY_BYTES; confirm before passing anything shorter.
 * @param endKey        exclusive upper bound on row keys; null or empty means run through 'zzz'
 * @param ts            timestamp for the Puts, or -1 to let the server assign one
 * @return count of what we added.
 */
public static long addContent(Table updater, String columnFamily, String column,
  byte[] startKeyBytes, byte[] endKey, long ts) throws IOException {
  long count = 0;
  // Add rows of three characters. The first character starts with the
  // 'a' character and runs up to 'z'. Per first character, we run the
  // second character over same range. And same for the third so rows
  // (and values) look like this: 'aaa', 'aab', 'aac', etc.
  char secondCharStart = (char) startKeyBytes[1];
  char thirdCharStart = (char) startKeyBytes[2];
  EXIT: for (char c = (char) startKeyBytes[0]; c <= LAST_CHAR; c++) {
    for (char d = secondCharStart; d <= LAST_CHAR; d++) {
      for (char e = thirdCharStart; e <= LAST_CHAR; e++) {
        // Row key is the 3-char string; the same bytes are also used as the cell value.
        byte[] t = new byte[] { (byte) c, (byte) d, (byte) e };
        // Stop everything once we reach the exclusive end key.
        if (endKey != null && endKey.length > 0 && Bytes.compareTo(endKey, t) <= 0) {
          break EXIT;
        }
        Put put;
        if (ts != -1) {
          put = new Put(t, ts);
        } else {
          put = new Put(t);
        }
        // Assemble the 'family:qualifier' spec for this cell.
        StringBuilder sb = new StringBuilder();
        if (column != null && column.contains(":")) {
          // column already carries the family portion; use it verbatim.
          sb.append(column);
        } else {
          if (columnFamily != null) {
            sb.append(columnFamily);
            if (!columnFamily.endsWith(":")) {
              sb.append(":");
            }
            if (column != null) {
              sb.append(column);
            }
          }
        }
        byte[][] split = CellUtil.parseColumn(Bytes.toBytes(sb.toString()));
        if (split.length == 1) {
          // Family only: write with an empty qualifier.
          byte[] qualifier = new byte[0];
          put.addColumn(split[0], qualifier, t);
        } else {
          put.addColumn(split[0], split[1], t);
        }
        // Test data doesn't need WAL durability; skipping it speeds up loading.
        put.setDurability(Durability.SKIP_WAL);
        updater.put(put);
        count++;
      }
      // Set start character back to FIRST_CHAR after we've done first loop.
      thirdCharStart = FIRST_CHAR;
    }
    secondCharStart = FIRST_CHAR;
  }
  return count;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@
*/
package org.apache.hadoop.hbase.client;

import static org.apache.hadoop.hbase.HBaseTestCase.assertByteEquals;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.junit.Assert.assertArrayEquals;

import java.io.IOException;
import java.nio.ByteBuffer;
Expand Down Expand Up @@ -168,7 +168,7 @@ public void testBasicGetValue() throws Exception {
for (int i = 0; i < 100; ++i) {
final byte[] qf = Bytes.toBytes(i);

assertByteEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
assertArrayEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
assertTrue(r.containsColumn(family, qf));
}
}
Expand All @@ -187,7 +187,7 @@ public void testMultiVersionGetValue() throws Exception {
for (int i = 0; i < 100; ++i) {
final byte[] qf = Bytes.toBytes(i);

assertByteEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
assertArrayEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
assertTrue(r.containsColumn(family, qf));
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,11 +38,11 @@
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.RegionInfo;
Expand Down Expand Up @@ -292,7 +292,7 @@ public void testSharedData() throws IOException {
HRegion region = initHRegion(tableName, name.getMethodName(), hc, new Class<?>[]{}, families);

for (int i = 0; i < 3; i++) {
HBaseTestCase.addContent(region, fam3);
HTestConst.addContent(region, fam3);
region.flush(true);
}

Expand Down Expand Up @@ -354,7 +354,7 @@ public void testCoprocessorInterface() throws IOException {
HRegion region = initHRegion(tableName, name.getMethodName(), hc,
new Class<?>[]{CoprocessorImpl.class}, families);
for (int i = 0; i < 3; i++) {
HBaseTestCase.addContent(region, fam3);
HTestConst.addContent(region, fam3);
region.flush(true);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,11 @@
*/
package org.apache.hadoop.hbase.regionserver;

import static org.apache.hadoop.hbase.HBaseTestCase.addContent;
import static org.apache.hadoop.hbase.HTestConst.addContent;
import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
Expand All @@ -50,11 +50,11 @@
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
Expand Down Expand Up @@ -175,7 +175,7 @@ public void testInterruptCompactionBySize() throws Exception {
for (int j = 0; j < jmax; j++) {
p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
}
HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
loader.put(p);
r.flush(true);
}
Expand Down Expand Up @@ -251,7 +251,7 @@ public void testInterruptCompactionByTime() throws Exception {
for (int j = 0; j < jmax; j++) {
p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
}
HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
loader.put(p);
r.flush(true);
}
Expand Down Expand Up @@ -330,7 +330,7 @@ private void createStoreFile(final HRegion region) throws IOException {

private void createStoreFile(final HRegion region, String family) throws IOException {
Table loader = new RegionAsTable(region);
HBaseTestCase.addContent(loader, family);
HTestConst.addContent(loader, family);
region.flush(true);
}

Expand Down Expand Up @@ -494,7 +494,7 @@ public void testStopStartCompaction() throws IOException {
for (int j = 0; j < jmax; j++) {
p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
}
HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
loader.put(p);
r.flush(true);
}
Expand Down
Loading

0 comments on commit 12002e6

Please sign in to comment.