HADOOP-18025. Upgrade HBase version to 1.7.1 for hbase1 profile (#3722)

parent df4197592f
commit b34dcb5b3a
@@ -297,10 +297,10 @@ org.apache.curator:curator-client:5.2.0
 org.apache.curator:curator-framework:5.2.0
 org.apache.curator:curator-recipes:5.2.0
 org.apache.geronimo.specs:geronimo-jcache_1.0_spec:1.0-alpha-1
-org.apache.hbase:hbase-annotations:1.4.8
-org.apache.hbase:hbase-client:1.4.8
-org.apache.hbase:hbase-common:1.4.8
-org.apache.hbase:hbase-protocol:1.4.8
+org.apache.hbase:hbase-annotations:1.7.1
+org.apache.hbase:hbase-client:1.7.1
+org.apache.hbase:hbase-common:1.7.1
+org.apache.hbase:hbase-protocol:1.7.1
 org.apache.htrace:htrace-core:3.1.0-incubating
 org.apache.htrace:htrace-core4:4.1.0-incubating
 org.apache.httpcomponents:httpclient:4.5.6
@@ -196,7 +196,7 @@
     <swagger-annotations-version>1.5.4</swagger-annotations-version>
     <snakeyaml.version>1.26</snakeyaml.version>
-    <hbase.one.version>1.4.8</hbase.one.version>
+    <hbase.one.version>1.7.1</hbase.one.version>
     <hbase.two.version>2.0.2</hbase.two.version>
     <junit.version>4.13.2</junit.version>
     <junit.jupiter.version>5.5.1</junit.jupiter.version>
@@ -2393,7 +2393,7 @@
       </activation>
       <properties>
         <hbase.version>${hbase.one.version}</hbase.version>
-        <hbase-compatible-hadoop.version>2.5.1</hbase-compatible-hadoop.version>
+        <hbase-compatible-hadoop.version>2.8.5</hbase-compatible-hadoop.version>
         <hbase-compatible-guava.version>12.0.1</hbase-compatible-guava.version>
         <hbase-server-artifactid>hadoop-yarn-server-timelineservice-hbase-server-1</hbase-server-artifactid>
       </properties>
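Alongside the HBase bump, the hbase1 profile moves hbase-compatible-hadoop.version from 2.5.1 to 2.8.5, presumably because HBase 1.7.1 targets the Hadoop 2.8 line. As a usage sketch (not part of the commit) for checking what a profile actually resolves, assuming the stock maven-dependency-plugin and with the module selection left as a placeholder:

    mvn dependency:tree -Dincludes='org.apache.hbase,org.apache.hadoop' -pl <timelineservice-hbase module>

Under the hbase1 profile this should now report the org.apache.hbase artifacts at 1.7.1 and the hbase-compatible Hadoop test dependencies at 2.8.5.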
@@ -96,6 +96,10 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs-client</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
@@ -106,6 +110,12 @@
       <artifactId>hadoop-auth</artifactId>
       <version>${hbase-compatible-hadoop.version}</version>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs-client</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>

     <dependency>
@@ -117,6 +127,10 @@
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs-client</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
@@ -311,6 +325,12 @@
       <artifactId>hadoop-hdfs</artifactId>
       <version>${hbase-compatible-hadoop.version}</version>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs-client</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>

     <!-- 'mvn dependency:analyze' fails to detect use of this direct
@@ -321,6 +341,19 @@
       <version>${hbase-compatible-hadoop.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs-client</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs-client</artifactId>
+      <version>${hbase-compatible-hadoop.version}</version>
+      <scope>test</scope>
+    </dependency>

     <dependency>
@@ -470,14 +503,6 @@
         </exclusion>
       </exclusions>
     </dependency>
-    <!-- 'mvn dependency:analyze' fails to detect use of this direct
-      dependency -->
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs-client</artifactId>
-      <version>${hbase-compatible-hadoop.version}</version>
-      <scope>test</scope>
-    </dependency>
     <!-- 'mvn dependency:analyze' fails to detect use of this direct
       dependency -->
     <!-- This is needed by HBaseTestingUtility -->
@@ -412,7 +412,7 @@ protected ResultScanner getResults(Configuration hbaseConf,
     }

     // set start row
-    scan.setStartRow(applicationRowKey.getRowKey());
+    scan.withStartRow(applicationRowKey.getRowKey());

     // get the bytes for stop row
     applicationRowKeyPrefix = new ApplicationRowKeyPrefix(
@@ -420,7 +420,7 @@ protected ResultScanner getResults(Configuration hbaseConf,
         context.getFlowRunId());

     // set stop row
-    scan.setStopRow(
+    scan.withStopRow(
        HBaseTimelineStorageUtils.calculateTheClosestNextRowKeyForPrefix(
            applicationRowKeyPrefix.getRowKeyPrefix()));
    }
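The reader changes in this commit are the same mechanical migration throughout: HBase 1.7 deprecates Scan.setStartRow/setStopRow in favor of withStartRow/withStopRow, which keep the familiar inclusive-start, exclusive-stop semantics. A minimal self-contained sketch of the pattern (the row keys below are made up for illustration, not taken from the timeline service schema):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ScanMigrationSketch {
      public static Scan buildScan() {
        byte[] startRow = Bytes.toBytes("cluster!app!0001"); // hypothetical row key
        byte[] stopRow  = Bytes.toBytes("cluster!app!9999"); // hypothetical row key

        Scan scan = new Scan();
        // Deprecated setters, flagged since withStartRow/withStopRow arrived:
        //   scan.setStartRow(startRow);
        //   scan.setStopRow(stopRow);
        // Replacements; start row is inclusive, stop row is exclusive:
        scan.withStartRow(startRow);
        scan.withStopRow(stopRow);
        return scan;
      }
    }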
@@ -158,9 +158,11 @@ private static byte[] getNextRowKey(byte[] currRowKeyPrefix,
   private ResultScanner getResult(Configuration hbaseConf, Connection conn,
       FilterList filterList, byte[] startPrefix, byte[] endPrefix)
       throws IOException {
-    Scan scan = new Scan(startPrefix, endPrefix);
-    scan.setFilter(filterList);
-    scan.setSmall(true);
+    Scan scan = new Scan()
+        .withStartRow(startPrefix)
+        .withStopRow(endPrefix)
+        .setFilter(filterList)
+        .setSmall(true);
     return ENTITY_TABLE.getResultScanner(hbaseConf, conn, scan);
   }
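This hunk also retires the two-argument Scan(byte[], byte[]) constructor, deprecated alongside the setter pair. Because withStartRow, withStopRow, setFilter, and setSmall each return the Scan itself, the construction collapses into one fluent chain. A self-contained sketch of the equivalence, with placeholder prefixes and filter contents:

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.FilterList;
    import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FluentScanSketch {
      public static Scan prefixScan() {
        byte[] startPrefix = Bytes.toBytes("flow!0");  // hypothetical prefix
        byte[] endPrefix   = Bytes.toBytes("flow!9");  // hypothetical prefix
        FilterList filters = new FilterList(new FirstKeyOnlyFilter());

        // Deprecated form:
        //   Scan scan = new Scan(startPrefix, endPrefix);
        //   scan.setFilter(filters);
        //   scan.setSmall(true);

        // Fluent form: every call returns the Scan, so it reads as one
        // expression. setSmall(true) hints the scan fits in a single RPC;
        // note HBase 2.x deprecates it in favor of setLimit/setReadType.
        return new Scan()
            .withStartRow(startPrefix)
            .withStopRow(endPrefix)
            .setFilter(filters)
            .setSmall(true);
      }
    }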
@@ -133,16 +133,16 @@ && getFilters().getCreatedTimeEnd() == Long.MAX_VALUE) {
         throw new BadRequestException(
             "fromid doesn't belong to clusterId=" + clusterId);
       }
-      scan.setStartRow(key.getRowKey());
-      scan.setStopRow(
+      scan.withStartRow(key.getRowKey());
+      scan.withStopRow(
           new FlowActivityRowKeyPrefix(clusterId,
               (getFilters().getCreatedTimeBegin() <= 0 ? 0
                   : (getFilters().getCreatedTimeBegin() - 1)))
               .getRowKeyPrefix());
     } else {
-      scan.setStartRow(new FlowActivityRowKeyPrefix(clusterId, getFilters()
+      scan.withStartRow(new FlowActivityRowKeyPrefix(clusterId, getFilters()
          .getCreatedTimeEnd()).getRowKeyPrefix());
-      scan.setStopRow(new FlowActivityRowKeyPrefix(clusterId, (getFilters()
+      scan.withStopRow(new FlowActivityRowKeyPrefix(clusterId, (getFilters()
          .getCreatedTimeBegin() <= 0 ? 0
              : (getFilters().getCreatedTimeBegin() - 1))).getRowKeyPrefix());
     }
@@ -241,14 +241,14 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
           "fromid doesn't belong to clusterId=" + context.getClusterId());
       }
       // set start row
-      scan.setStartRow(flowRunRowKey.getRowKey());
+      scan.withStartRow(flowRunRowKey.getRowKey());

       // get the bytes for stop row
       flowRunRowKeyPrefix = new FlowRunRowKeyPrefix(context.getClusterId(),
           context.getUserId(), context.getFlowName());

       // set stop row
-      scan.setStopRow(
+      scan.withStopRow(
           HBaseTimelineStorageUtils.calculateTheClosestNextRowKeyForPrefix(
               flowRunRowKeyPrefix.getRowKeyPrefix()));
     }
@@ -519,7 +519,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
     }

     // set start row
-    scan.setStartRow(entityRowKey.getRowKey());
+    scan.withStartRow(entityRowKey.getRowKey());

     // get the bytes for stop row
     entityRowKeyPrefix = new EntityRowKeyPrefix(context.getClusterId(),
@@ -527,7 +527,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
         context.getAppId(), context.getEntityType(), null, null);

     // set stop row
-    scan.setStopRow(
+    scan.withStopRow(
         HBaseTimelineStorageUtils.calculateTheClosestNextRowKeyForPrefix(
             entityRowKeyPrefix.getRowKeyPrefix()));

@@ -372,7 +372,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
     }

     // set start row
-    scan.setStartRow(entityRowKey.getRowKey());
+    scan.withStartRow(entityRowKey.getRowKey());

     // get the bytes for stop row
     subApplicationRowKeyPrefix = new SubApplicationRowKeyPrefix(
@@ -380,7 +380,7 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
         context.getEntityType(), null, null, null);

     // set stop row
-    scan.setStopRow(
+    scan.withStopRow(
         HBaseTimelineStorageUtils.calculateTheClosestNextRowKeyForPrefix(
             subApplicationRowKeyPrefix.getRowKeyPrefix()));
