Commit 578337b

HBASE-28684 Remove CellWrapper and use ExtendedCell internally in client side data structure

Apache9 committed Jul 3, 2024
1 parent 0a1f1c4 · commit 578337b
Showing 131 changed files with 1,064 additions and 1,011 deletions.
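The pattern repeated across these files: client-side data structures switch from the public `Cell` and `CellScannable` types to the internal `ExtendedCell` and `ExtendedCellScannable`, and a plain `Cell` arriving through the public API is converted at the boundary with `PrivateCellUtil.ensureExtendedCell`. The diff does not show that method's body; below is a minimal sketch of what such a conversion could look like, under the assumption that every cell produced inside HBase already implements `ExtendedCell`, so no copying is needed:

```java
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ExtendedCell;

final class EnsureExtendedCellSketch {
  // Hypothetical stand-in for PrivateCellUtil.ensureExtendedCell(Cell);
  // the real implementation is not part of this diff.
  static ExtendedCell ensureExtendedCell(Cell cell) {
    if (cell == null) {
      return null;
    }
    if (!(cell instanceof ExtendedCell)) {
      throw new IllegalArgumentException(
        "Expected an ExtendedCell but got " + cell.getClass().getName());
    }
    return (ExtendedCell) cell;
  }
}
```

The first file shown, a MapReduce mapper over HFile cells, uses exactly this conversion: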
```diff
@@ -24,6 +24,7 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -76,7 +77,7 @@ static class HFileCellMapper extends Mapper<NullWritable, Cell, ImmutableBytesWr
     public void map(NullWritable key, Cell value, Context context)
       throws IOException, InterruptedException {
       context.write(new ImmutableBytesWritable(CellUtil.cloneRow(value)),
-        new MapReduceExtendedCell(value));
+        new MapReduceExtendedCell(PrivateCellUtil.ensureExtendedCell(value)));
     }

     @Override
```
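Note that the mapper's declared input value type stays the public `Cell` interface; the conversion happens at the `context.write` call site, presumably because `MapReduceExtendedCell` now wraps only `ExtendedCell`. The next file, in `org.apache.hadoop.hbase.client`, applies the same migration to the batched-RPC request path (`buildReq`/`sendToServer`):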
```diff
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hbase.client;

-import static org.apache.hadoop.hbase.CellUtil.createCellScanner;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.calcPriority;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.resetController;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.translateException;
@@ -44,11 +43,12 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.apache.commons.lang3.mutable.MutableBoolean;
-import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.ExtendedCellScannable;
 import org.apache.hadoop.hbase.HBaseServerException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.RetryImmediatelyException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -274,7 +274,7 @@ private void failAll(Stream<Action> actions, int tries) {
   }

   private ClientProtos.MultiRequest buildReq(Map<byte[], RegionRequest> actionsByRegion,
-    List<CellScannable> cells, Map<Integer, Integer> indexMap) throws IOException {
+    List<ExtendedCellScannable> cells, Map<Integer, Integer> indexMap) throws IOException {
     ClientProtos.MultiRequest.Builder multiRequestBuilder = ClientProtos.MultiRequest.newBuilder();
     ClientProtos.RegionAction.Builder regionActionBuilder = ClientProtos.RegionAction.newBuilder();
     ClientProtos.Action.Builder actionBuilder = ClientProtos.Action.newBuilder();
```
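The `cells` list threaded through `buildReq` collects each action's payload cells so they can travel in the RPC cellblock alongside the protobuf request instead of being serialized into it; the list's element type accordingly becomes `ExtendedCellScannable`. The scanner over those cells is attached to the controller later in `sendToServer`: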
```diff
@@ -382,7 +382,7 @@ private void sendToServer(ServerName serverName, ServerRequest serverReq, int tr
       return;
     }
     ClientProtos.MultiRequest req;
-    List<CellScannable> cells = new ArrayList<>();
+    List<ExtendedCellScannable> cells = new ArrayList<>();
     // Map from a created RegionAction to the original index for a RowMutations within
     // the original list of actions. This will be used to process the results when there
     // is RowMutations/CheckAndMutate in the action list.
@@ -398,7 +398,7 @@ private void sendToServer(ServerName serverName, ServerRequest serverReq, int tr
         calcPriority(serverReq.getPriority(), tableName), tableName);
     controller.setRequestAttributes(requestAttributes);
     if (!cells.isEmpty()) {
-      controller.setCellScanner(createCellScanner(cells));
+      controller.setCellScanner(PrivateCellUtil.createExtendedCellScanner(cells));
     }
     stub.multi(controller, req, resp -> {
       if (controller.failed()) {
```
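A short sketch of consuming the scanner built above, assuming `PrivateCellUtil.createExtendedCellScanner(List<ExtendedCellScannable>)` returns an `ExtendedCellScanner` that follows the usual `CellScanner` contract while narrowing `current()` to `ExtendedCell` (the return type is inferred, not shown in this diff):

```java
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.ExtendedCellScannable;
import org.apache.hadoop.hbase.ExtendedCellScanner;
import org.apache.hadoop.hbase.PrivateCellUtil;

final class CellBlockSketch {
  // Count the cells that would travel in the request's cellblock.
  static int countCells(List<ExtendedCellScannable> cells) throws IOException {
    ExtendedCellScanner scanner = PrivateCellUtil.createExtendedCellScanner(cells);
    int count = 0;
    while (scanner.advance()) {
      ExtendedCell cell = scanner.current(); // narrowed return type, no cast needed
      count++;
    }
    return count;
  }
}
```

The next file shown migrates the internal cell lists of the `Delete` mutation: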
```diff
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilder;
 import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.security.access.Permission;
@@ -171,10 +172,8 @@ public Delete addFamily(final byte[] family) {
    * @return this for invocation chaining
    */
   public Delete addFamily(final byte[] family, final long timestamp) {
-    if (timestamp < 0) {
-      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
-    }
-    List<Cell> list = getCellList(family);
+    checkTimestamp(timestamp);
+    List<ExtendedCell> list = getCellList(family);
     if (!list.isEmpty()) {
       list.clear();
     }
@@ -190,10 +189,8 @@ public Delete addFamily(final byte[] family, final long timestamp) {
    * @return this for invocation chaining
    */
   public Delete addFamilyVersion(final byte[] family, final long timestamp) {
-    if (timestamp < 0) {
-      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
-    }
-    List<Cell> list = getCellList(family);
+    checkTimestamp(timestamp);
+    List<ExtendedCell> list = getCellList(family);
     list.add(new KeyValue(row, family, null, timestamp, KeyValue.Type.DeleteFamilyVersion));
     return this;
   }
@@ -218,10 +215,8 @@ public Delete addColumns(final byte[] family, final byte[] qualifier) {
    * @return this for invocation chaining
    */
   public Delete addColumns(final byte[] family, final byte[] qualifier, final long timestamp) {
-    if (timestamp < 0) {
-      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
-    }
-    List<Cell> list = getCellList(family);
+    checkTimestamp(timestamp);
+    List<ExtendedCell> list = getCellList(family);
     list.add(new KeyValue(this.row, family, qualifier, timestamp, KeyValue.Type.DeleteColumn));
     return this;
   }
@@ -247,10 +242,8 @@ public Delete addColumn(final byte[] family, final byte[] qualifier) {
    * @return this for invocation chaining
    */
   public Delete addColumn(byte[] family, byte[] qualifier, long timestamp) {
-    if (timestamp < 0) {
-      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
-    }
-    List<Cell> list = getCellList(family);
+    checkTimestamp(timestamp);
+    List<ExtendedCell> list = getCellList(family);
     KeyValue kv = new KeyValue(this.row, family, qualifier, timestamp, KeyValue.Type.Delete);
     list.add(kv);
     return this;
```
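All four `Delete` methods keep their public contract; only the internal list type and the timestamp validation change (the inline negative-timestamp check becomes the `checkTimestamp` helper, which presumably still throws `IllegalArgumentException`). Typical usage against the public API is untouched:

```java
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.util.Bytes;

final class DeleteUsage {
  static Delete exampleDelete() {
    Delete d = new Delete(Bytes.toBytes("row-1"));
    // All versions of cf:q with timestamp <= 100
    d.addColumns(Bytes.toBytes("cf"), Bytes.toBytes("q"), 100L);
    // Only the version of cf:q2 with timestamp exactly 100
    d.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q2"), 100L);
    // d.addFamily(Bytes.toBytes("cf"), -1L); // still rejected: negative timestamp
    return d;
  }
}
```

The `Increment` mutation gets the same treatment: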
```diff
@@ -21,12 +21,14 @@
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
+import java.util.Objects;
 import java.util.TreeMap;
 import java.util.UUID;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilder;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.security.access.Permission;
@@ -114,10 +116,8 @@ public Increment add(Cell cell) throws IOException {
    * @return the Increment object
    */
   public Increment addColumn(byte[] family, byte[] qualifier, long amount) {
-    if (family == null) {
-      throw new IllegalArgumentException("family cannot be null");
-    }
-    List<Cell> list = getCellList(family);
+    Objects.requireNonNull(family, "family cannot be null");
+    List<ExtendedCell> list = getCellList(family);
     KeyValue kv = createPutKeyValue(family, qualifier, ts, Bytes.toBytes(amount));
     list.add(kv);
     return this;
```
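One subtle behavioral shift in the hunk above: the old inline check rejected a null family with `IllegalArgumentException`, whereas `Objects.requireNonNull` throws `NullPointerException`. Code that catches the former specifically would see the difference; ordinary usage does not:

```java
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.util.Bytes;

final class IncrementUsage {
  static Increment bumpCounter() {
    Increment inc = new Increment(Bytes.toBytes("row-1"));
    inc.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("hits"), 1L);
    // inc.addColumn(null, Bytes.toBytes("hits"), 1L);
    //   -> now NullPointerException (previously IllegalArgumentException)
    return inc;
  }
}
```

The last hunk captured updates `Increment.toString()` to match the new list type: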
```diff
@@ -224,7 +224,7 @@ public String toString() {
     }
     sb.append(", families=");
     boolean moreThanOne = false;
-    for (Map.Entry<byte[], List<Cell>> entry : this.familyMap.entrySet()) {
+    for (Map.Entry<byte[], List<ExtendedCell>> entry : this.familyMap.entrySet()) {
       if (moreThanOne) {
         sb.append("), ");
       } else {
```
(Diff truncated: 4 of the 131 changed files shown.)
