LICENSE-binary — 8 changes: 4 additions & 4 deletions
@@ -297,10 +297,10 @@ org.apache.curator:curator-client:5.2.0
org.apache.curator:curator-framework:5.2.0
org.apache.curator:curator-recipes:5.2.0
org.apache.geronimo.specs:geronimo-jcache_1.0_spec:1.0-alpha-1
-org.apache.hbase:hbase-annotations:1.4.8
-org.apache.hbase:hbase-client:1.4.8
-org.apache.hbase:hbase-common:1.4.8
-org.apache.hbase:hbase-protocol:1.4.8
+org.apache.hbase:hbase-annotations:1.7.1
+org.apache.hbase:hbase-client:1.7.1
+org.apache.hbase:hbase-common:1.7.1
+org.apache.hbase:hbase-protocol:1.7.1
org.apache.htrace:htrace-core:3.1.0-incubating
org.apache.htrace:htrace-core4:4.1.0-incubating
org.apache.httpcomponents:httpclient:4.5.6
hadoop-project/pom.xml — 4 changes: 2 additions & 2 deletions
@@ -196,7 +196,7 @@

<swagger-annotations-version>1.5.4</swagger-annotations-version>
<snakeyaml.version>1.26</snakeyaml.version>
-<hbase.one.version>1.4.8</hbase.one.version>
+<hbase.one.version>1.7.1</hbase.one.version>
<hbase.two.version>2.0.2</hbase.two.version>
<junit.version>4.13.2</junit.version>
<junit.jupiter.version>5.5.1</junit.jupiter.version>
@@ -2393,7 +2393,7 @@
</activation>
<properties>
<hbase.version>${hbase.one.version}</hbase.version>
-<hbase-compatible-hadoop.version>2.5.1</hbase-compatible-hadoop.version>
+<hbase-compatible-hadoop.version>2.8.5</hbase-compatible-hadoop.version>
<hbase-compatible-guava.version>12.0.1</hbase-compatible-guava.version>
<hbase-server-artifactid>hadoop-yarn-server-timelineservice-hbase-server-1</hbase-server-artifactid>
</properties>
@@ -96,6 +96,10 @@
<groupId>tomcat</groupId>
<artifactId>jasper-runtime</artifactId>
</exclusion>
+<exclusion>
+<groupId>org.apache.hadoop</groupId>
+<artifactId>hadoop-hdfs-client</artifactId>
+</exclusion>
</exclusions>
</dependency>

@@ -106,6 +110,12 @@
<artifactId>hadoop-auth</artifactId>
<version>${hbase-compatible-hadoop.version}</version>
<scope>test</scope>
+<exclusions>
+<exclusion>
+<groupId>org.apache.hadoop</groupId>
+<artifactId>hadoop-hdfs-client</artifactId>
+</exclusion>
+</exclusions>
</dependency>

<dependency>
@@ -117,6 +127,10 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</exclusion>
+<exclusion>
+<groupId>org.apache.hadoop</groupId>
+<artifactId>hadoop-hdfs-client</artifactId>
+</exclusion>
</exclusions>
</dependency>

@@ -311,6 +325,12 @@
<artifactId>hadoop-hdfs</artifactId>
<version>${hbase-compatible-hadoop.version}</version>
<scope>test</scope>
+<exclusions>
+<exclusion>
+<groupId>org.apache.hadoop</groupId>
+<artifactId>hadoop-hdfs-client</artifactId>
+</exclusion>
+</exclusions>
</dependency>

<!-- 'mvn dependency:analyze' fails to detect use of this direct
@@ -321,6 +341,19 @@
<version>${hbase-compatible-hadoop.version}</version>
<type>test-jar</type>
<scope>test</scope>
+<exclusions>
+<exclusion>
+<groupId>org.apache.hadoop</groupId>
+<artifactId>hadoop-hdfs-client</artifactId>
+</exclusion>
+</exclusions>
</dependency>

+<dependency>
+<groupId>org.apache.hadoop</groupId>
+<artifactId>hadoop-hdfs-client</artifactId>
+<version>${hbase-compatible-hadoop.version}</version>
+<scope>test</scope>
+</dependency>

<dependency>
@@ -470,14 +503,6 @@
</exclusion>
</exclusions>
</dependency>
-<!-- 'mvn dependency:analyze' fails to detect use of this direct
-dependency -->
-<dependency>
-<groupId>org.apache.hadoop</groupId>
-<artifactId>hadoop-hdfs-client</artifactId>
-<version>${hbase-compatible-hadoop.version}</version>
-<scope>test</scope>
-</dependency>
<!-- 'mvn dependency:analyze' fails to detect use of this direct
dependency -->
<!-- This is needed by HBaseTestingUtility -->
@@ -412,15 +412,15 @@ protected ResultScanner getResults(Configuration hbaseConf,
}

// set start row
-scan.setStartRow(applicationRowKey.getRowKey());
+scan.withStartRow(applicationRowKey.getRowKey());

// get the bytes for stop row
applicationRowKeyPrefix = new ApplicationRowKeyPrefix(
context.getClusterId(), context.getUserId(), context.getFlowName(),
context.getFlowRunId());

// set stop row
-scan.setStopRow(
+scan.withStopRow(
HBaseTimelineStorageUtils.calculateTheClosestNextRowKeyForPrefix(
applicationRowKeyPrefix.getRowKeyPrefix()));
}
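Not part of the patch: a minimal Java sketch of the substitution applied in this and the following reader classes. In recent HBase client releases Scan.setStartRow/setStopRow are deprecated in favor of withStartRow/withStopRow, which keep the same defaults (start row inclusive, stop row exclusive). The class and method names below are illustrative only.

import org.apache.hadoop.hbase.client.Scan;

// Illustrative helper, not taken from the patch.
final class ScanBoundarySketch {

  // Old style: deprecated setters, kept here only for comparison.
  static Scan deprecatedStyle(byte[] startRow, byte[] stopRow) {
    Scan scan = new Scan();
    scan.setStartRow(startRow);  // deprecated
    scan.setStopRow(stopRow);    // deprecated
    return scan;
  }

  // New style used throughout the patch; both methods return the Scan itself.
  static Scan replacementStyle(byte[] startRow, byte[] stopRow) {
    return new Scan()
        .withStartRow(startRow)   // inclusive start
        .withStopRow(stopRow);    // exclusive stop
  }
}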
@@ -158,7 +158,9 @@ private static byte[] getNextRowKey(byte[] currRowKeyPrefix,
private ResultScanner getResult(Configuration hbaseConf, Connection conn,
FilterList filterList, byte[] startPrefix, byte[] endPrefix)
throws IOException {
-Scan scan = new Scan(startPrefix, endPrefix);
+Scan scan = new Scan();
+scan.withStartRow(startPrefix);
+scan.withStopRow(endPrefix);
Contributor:
nit: it would be best to build the scan as a single chain (see the sketch after this hunk):
scan.withStartRow()
    .withStopRow()
    .setFilter()
    .setSmall()
scan.setFilter(filterList);
scan.setSmall(true);
return ENTITY_TABLE.getResultScanner(hbaseConf, conn, scan);
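Not part of the patch: a sketch of the chained form the reviewer asks for. It assumes the HBase client Scan API, where withStartRow, withStopRow, setFilter and setSmall each return the Scan instance, so getResult could build and configure the scan in one expression; the class and method names here are hypothetical.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;

// Hypothetical helper showing the chained construction.
final class ChainedScanSketch {
  static Scan smallPrefixScan(byte[] startPrefix, byte[] endPrefix,
      FilterList filterList) {
    return new Scan()
        .withStartRow(startPrefix)
        .withStopRow(endPrefix)
        .setFilter(filterList)
        .setSmall(true);
  }
}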
@@ -133,16 +133,16 @@ && getFilters().getCreatedTimeEnd() == Long.MAX_VALUE) {
throw new BadRequestException(
"fromid doesn't belong to clusterId=" + clusterId);
}
-scan.setStartRow(key.getRowKey());
-scan.setStopRow(
+scan.withStartRow(key.getRowKey());
+scan.withStopRow(
new FlowActivityRowKeyPrefix(clusterId,
(getFilters().getCreatedTimeBegin() <= 0 ? 0
: (getFilters().getCreatedTimeBegin() - 1)))
.getRowKeyPrefix());
} else {
-scan.setStartRow(new FlowActivityRowKeyPrefix(clusterId, getFilters()
+scan.withStartRow(new FlowActivityRowKeyPrefix(clusterId, getFilters()
.getCreatedTimeEnd()).getRowKeyPrefix());
-scan.setStopRow(new FlowActivityRowKeyPrefix(clusterId, (getFilters()
+scan.withStopRow(new FlowActivityRowKeyPrefix(clusterId, (getFilters()
.getCreatedTimeBegin() <= 0 ? 0
: (getFilters().getCreatedTimeBegin() - 1))).getRowKeyPrefix());
}
@@ -241,14 +241,14 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
"fromid doesn't belong to clusterId=" + context.getClusterId());
}
// set start row
-scan.setStartRow(flowRunRowKey.getRowKey());
+scan.withStartRow(flowRunRowKey.getRowKey());

// get the bytes for stop row
flowRunRowKeyPrefix = new FlowRunRowKeyPrefix(context.getClusterId(),
context.getUserId(), context.getFlowName());

// set stop row
-scan.setStopRow(
+scan.withStopRow(
HBaseTimelineStorageUtils.calculateTheClosestNextRowKeyForPrefix(
flowRunRowKeyPrefix.getRowKeyPrefix()));
}
@@ -519,15 +519,15 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
}

// set start row
-scan.setStartRow(entityRowKey.getRowKey());
+scan.withStartRow(entityRowKey.getRowKey());

// get the bytes for stop row
entityRowKeyPrefix = new EntityRowKeyPrefix(context.getClusterId(),
context.getUserId(), context.getFlowName(), context.getFlowRunId(),
context.getAppId(), context.getEntityType(), null, null);

// set stop row
-scan.setStopRow(
+scan.withStopRow(
HBaseTimelineStorageUtils.calculateTheClosestNextRowKeyForPrefix(
entityRowKeyPrefix.getRowKeyPrefix()));

@@ -372,15 +372,15 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
}

// set start row
-scan.setStartRow(entityRowKey.getRowKey());
+scan.withStartRow(entityRowKey.getRowKey());

// get the bytes for stop row
subApplicationRowKeyPrefix = new SubApplicationRowKeyPrefix(
context.getDoAsUser(), context.getClusterId(),
context.getEntityType(), null, null, null);

// set stop row
-scan.setStopRow(
+scan.withStopRow(
HBaseTimelineStorageUtils.calculateTheClosestNextRowKeyForPrefix(
subApplicationRowKeyPrefix.getRowKeyPrefix()));
