From 16baa8aa3d92eb9b04a05e1a8353ad9727c5d0a7 Mon Sep 17 00:00:00 2001
From: chandrasekhar-188k <154109917+chandrasekhar-188k@users.noreply.github.com>
Date: Sun, 21 Apr 2024 17:15:06 +0530
Subject: [PATCH] HBASE-28497 Missing fields in Get.toJSON (#5800)

Signed-off-by: Duo Zhang
Signed-off-by: Pankaj Kumar
(cherry picked from commit c73f8b51cba518984c48f8ead64b9cde395c54c3)
---
 .../org/apache/hadoop/hbase/client/Get.java   | 21 ++++++
 .../hadoop/hbase/client/TestOperation.java    | 72 +++++++++++++++++++
 2 files changed, 93 insertions(+)

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
index 9d692ceaa509..dd68c57a417a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
@@ -27,6 +27,7 @@
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
+import java.util.stream.Collectors;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.TimeRange;
@@ -488,6 +489,26 @@ public Map<String, Object> toMap(int maxCols) {
     if (getId() != null) {
       map.put("id", getId());
     }
+    map.put("storeLimit", this.storeLimit);
+    map.put("storeOffset", this.storeOffset);
+    map.put("checkExistenceOnly", this.checkExistenceOnly);
+
+    map.put("targetReplicaId", this.targetReplicaId);
+    map.put("consistency", this.consistency);
+    map.put("loadColumnFamiliesOnDemand", this.loadColumnFamiliesOnDemand);
+    if (!colFamTimeRangeMap.isEmpty()) {
+      Map<String, List<Long>> colFamTimeRangeMapStr = colFamTimeRangeMap.entrySet().stream()
+        .collect(Collectors.toMap((e) -> Bytes.toStringBinary(e.getKey()), e -> {
+          TimeRange value = e.getValue();
+          List<Long> rangeList = new ArrayList<>();
+          rangeList.add(value.getMin());
+          rangeList.add(value.getMax());
+          return rangeList;
+        }));
+
+      map.put("colFamTimeRangeMap", colFamTimeRangeMapStr);
+    }
+    map.put("priority", getPriority());
     return map;
   }
 
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index 6725f161f20f..644062ca9917 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -504,4 +504,76 @@ public void testOperationSubClassMethodsAreBuilderStyle() {
 
     BuilderStyleTest.assertClassesAreBuilderStyle(classes);
   }
+
+  /**
+   * Test the client Get Operations' JSON encoding to ensure that produced JSON is parseable and
+   * that the details are present and not corrupted.
+   * @throws IOException if the JSON conversion fails
+   */
+  @Test
+  public void testGetOperationToJSON() throws IOException {
+    // produce a Get Operation
+    Get get = new Get(ROW);
+    get.addColumn(FAMILY, QUALIFIER);
+    get.readVersions(5);
+    get.setMaxResultsPerColumnFamily(3);
+    get.setRowOffsetPerColumnFamily(8);
+    get.setCacheBlocks(true);
+    get.setMaxResultsPerColumnFamily(5);
+    get.setRowOffsetPerColumnFamily(9);
+    get.setCheckExistenceOnly(true);
+    get.setTimeRange(1000, 2000);
+    get.setFilter(SCV_FILTER);
+    get.setReplicaId(1);
+    get.setConsistency(Consistency.STRONG);
+    get.setLoadColumnFamiliesOnDemand(true);
+    get.setColumnFamilyTimeRange(FAMILY, 2000, 3000);
+    get.setPriority(10);
+
+    // get its JSON representation, and parse it
+    String json = get.toJSON();
+    Type typeOfHashMap = new TypeToken<Map<String, Object>>() {
+    }.getType();
+    Gson gson = new GsonBuilder().setLongSerializationPolicy(LongSerializationPolicy.STRING)
+      .setObjectToNumberStrategy(ToNumberPolicy.LONG_OR_DOUBLE).create();
+    Map<String, Object> parsedJSON = gson.fromJson(json, typeOfHashMap);
+    // check for the row
+    assertEquals("row incorrect in Get.toJSON()", Bytes.toStringBinary(ROW), parsedJSON.get("row"));
+    // check for the family and the qualifier.
+    List familyInfo = (List) ((Map) parsedJSON.get("families")).get(Bytes.toStringBinary(FAMILY));
+    assertNotNull("Family absent in Get.toJSON()", familyInfo);
+    assertEquals("Qualifier absent in Get.toJSON()", 1, familyInfo.size());
+    assertEquals("Qualifier incorrect in Get.toJSON()", Bytes.toStringBinary(QUALIFIER),
+      familyInfo.get(0));
+
+    assertEquals("maxVersions incorrect in Get.toJSON()", 5L, parsedJSON.get("maxVersions"));
+
+    assertEquals("storeLimit incorrect in Get.toJSON()", 5L, parsedJSON.get("storeLimit"));
+    assertEquals("storeOffset incorrect in Get.toJSON()", 9L, parsedJSON.get("storeOffset"));
+
+    assertEquals("cacheBlocks incorrect in Get.toJSON()", true, parsedJSON.get("cacheBlocks"));
+
+    List trList = (List) parsedJSON.get("timeRange");
+    assertEquals("timeRange incorrect in Get.toJSON()", 2, trList.size());
+    assertEquals("timeRange incorrect in Get.toJSON()", "1000", trList.get(0));
+    assertEquals("timeRange incorrect in Get.toJSON()", "2000", trList.get(1));
+
+    Map colFamTimeRange = (Map) parsedJSON.get("colFamTimeRangeMap");
+    assertEquals("colFamTimeRangeMap incorrect in Get.toJSON()", 1L, colFamTimeRange.size());
+    List testFamily = (List) colFamTimeRange.get("testFamily");
+    assertEquals("colFamTimeRangeMap incorrect in Get.toJSON()", 2L, testFamily.size());
+    assertEquals("colFamTimeRangeMap incorrect in Get.toJSON()", "2000", testFamily.get(0));
+    assertEquals("colFamTimeRangeMap incorrect in Get.toJSON()", "3000", testFamily.get(1));
+
+    assertEquals("targetReplicaId incorrect in Get.toJSON()", 1L,
+      parsedJSON.get("targetReplicaId"));
+    assertEquals("consistency incorrect in Get.toJSON()", "STRONG", parsedJSON.get("consistency"));
+    assertEquals("loadColumnFamiliesOnDemand incorrect in Get.toJSON()", true,
+      parsedJSON.get("loadColumnFamiliesOnDemand"));
+
+    assertEquals("priority incorrect in Get.toJSON()", 10L, parsedJSON.get("priority"));
+    assertEquals("checkExistenceOnly incorrect in Get.toJSON()", true,
+      parsedJSON.get("checkExistenceOnly"));
+
+  }
 }
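Reviewer note (not part of the patch): a minimal stand-alone sketch of how the enriched JSON output can be observed once this change is applied. The class name GetToJsonDemo and the row/family/qualifier values are made up for illustration; the setters mirror those exercised by the new test, and the exact JSON formatting depends on the Gson configuration used by Operation.toJSON().

import java.io.IOException;
import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.util.Bytes;

public class GetToJsonDemo {
  public static void main(String[] args) throws IOException {
    // Configure a Get roughly the way the new test does.
    Get get = new Get(Bytes.toBytes("row-1"));
    get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));
    get.setCheckExistenceOnly(true);
    get.setConsistency(Consistency.STRONG);
    get.setLoadColumnFamiliesOnDemand(true);
    get.setColumnFamilyTimeRange(Bytes.toBytes("cf"), 2000, 3000);
    get.setPriority(10);
    // With this patch the JSON also carries storeLimit, storeOffset, checkExistenceOnly,
    // targetReplicaId, consistency, loadColumnFamiliesOnDemand, colFamTimeRangeMap and priority,
    // in addition to the previously emitted fingerprint and scalar fields.
    System.out.println(get.toJSON());
  }
}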