YARN-5142. fix findbugs warnings/errors for hadoop-yarn-server-timelineservice-hbase-tests. (Vrushali C via Varun Saxena)

Varun Saxena authored on 2016-06-04 19:20:43 +05:30, committed by Sangjin Lee
parent 0a9b085f05
commit 9c926cf432
6 changed files with 120 additions and 72 deletions

View File

@@ -345,7 +345,9 @@ private static void loadData() throws Exception {
         "application_1111111111_1111", te5);
     hbi.flush();
   } finally {
-    hbi.close();
+    if (hbi != null) {
+      hbi.close();
+    }
   }
 }
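
Note: this guard (repeated in the similar hunks below) addresses a findbugs complaint about a possible null dereference: if construction or start of the writer throws, the variable is still null when the finally block runs, and an unguarded close() would replace the original exception with a NullPointerException. A minimal standalone sketch of the pattern, with a plain Closeable standing in for HBaseTimelineWriterImpl (the names here are illustrative, not from this patch):

import java.io.Closeable;
import java.io.IOException;

class CloseGuardSketch {
  static void loadData() throws IOException {
    Closeable writer = null;
    try {
      writer = openWriter();   // may throw before 'writer' is assigned
      // ... write and flush ...
    } finally {
      if (writer != null) {    // guard: skip close() when openWriter() threw
        writer.close();
      }
    }
  }

  // Stand-in for the real writer factory; not part of the patch.
  static Closeable openWriter() throws IOException {
    return () -> { };
  }
}

A try-with-resources block would give the same guarantee, but the patch keeps the existing try/finally structure and only adds the null check.
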
@@ -390,7 +392,7 @@ private static ClientResponse getResponse(Client client, URI uri)
         .type(MediaType.APPLICATION_JSON).get(ClientResponse.class);
     if (resp == null ||
         resp.getClientResponseStatus() != ClientResponse.Status.OK) {
-      String msg = new String();
+      String msg = "";
       if (resp != null) {
         msg = resp.getClientResponseStatus().toString();
       }
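
Note: findbugs flags new String() as the DM_STRING_VOID_CTOR pattern, because the constructor builds a fresh object for a value the string literal already provides from the constant pool; callers see identical behavior. A quick illustration:

class EmptyStringSketch {
  public static void main(String[] args) {
    String a = new String();   // flagged: allocates a distinct empty string
    String b = "";             // reuses the interned constant

    System.out.println(a.equals(b));   // true  - same contents
    System.out.println(a == "");       // false - separate allocation
    System.out.println(b == "");       // true  - same interned instance
  }
}
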

View File

@@ -678,32 +678,34 @@ public void testWriteApplicationToHBase() throws Exception {
       assertEquals(infoMap, infoColumns);
 
       // Remember isRelatedTo is of type Map<String, Set<String>>
-      for (String isRelatedToKey : isRelatedTo.keySet()) {
+      for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo
+          .entrySet()) {
         Object isRelatedToValue =
             ApplicationColumnPrefix.IS_RELATED_TO.readResult(result,
-                isRelatedToKey);
+                isRelatedToEntry.getKey());
         String compoundValue = isRelatedToValue.toString();
         // id7?id9?id6
         Set<String> isRelatedToValues =
             new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
-        assertEquals(isRelatedTo.get(isRelatedToKey).size(),
+        assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(),
             isRelatedToValues.size());
-        for (String v : isRelatedTo.get(isRelatedToKey)) {
+        for (String v : isRelatedToEntry.getValue()) {
           assertTrue(isRelatedToValues.contains(v));
         }
       }
 
       // RelatesTo
-      for (String relatesToKey : relatesTo.keySet()) {
+      for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo
+          .entrySet()) {
         String compoundValue =
             ApplicationColumnPrefix.RELATES_TO.readResult(result,
-                relatesToKey).toString();
+                relatesToEntry.getKey()).toString();
         // id3?id4?id5
         Set<String> relatesToValues =
             new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
-        assertEquals(relatesTo.get(relatesToKey).size(),
+        assertEquals(relatesTo.get(relatesToEntry.getKey()).size(),
             relatesToValues.size());
-        for (String v : relatesTo.get(relatesToKey)) {
+        for (String v : relatesToEntry.getValue()) {
           assertTrue(relatesToValues.contains(v));
         }
       }
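
Note: the rewrites in this file all trade keySet() iteration plus a get() per element for a single entrySet() pass, which is what findbugs' WMI_WRONG_MAP_ITERATOR detector asks for: the entry already carries the value, so the per-iteration map lookup is pure overhead. A self-contained sketch of the before/after shapes (the map contents here are made up):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class EntrySetSketch {
  public static void main(String[] args) {
    Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
    relatesTo.put("task",
        new HashSet<String>(Arrays.asList("id3", "id4", "id5")));

    // Before: flagged by WMI_WRONG_MAP_ITERATOR - one extra lookup per key.
    for (String key : relatesTo.keySet()) {
      Set<String> values = relatesTo.get(key);
      System.out.println(key + " -> " + values);
    }

    // After: the entry hands back key and value together.
    for (Map.Entry<String, Set<String>> e : relatesTo.entrySet()) {
      System.out.println(e.getKey() + " -> " + e.getValue());
    }
  }
}
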
@@ -938,41 +940,43 @@ public void testWriteEntityToHBase() throws Exception {
       assertEquals(infoMap, infoColumns);
 
       // Remember isRelatedTo is of type Map<String, Set<String>>
-      for (String isRelatedToKey : isRelatedTo.keySet()) {
+      for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo
+          .entrySet()) {
         Object isRelatedToValue =
             EntityColumnPrefix.IS_RELATED_TO.readResult(result,
-                isRelatedToKey);
+                isRelatedToEntry.getKey());
         String compoundValue = isRelatedToValue.toString();
         // id7?id9?id6
         Set<String> isRelatedToValues =
             new HashSet<String>(
                 Separator.VALUES.splitEncoded(compoundValue));
-        assertEquals(isRelatedTo.get(isRelatedToKey).size(),
+        assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(),
             isRelatedToValues.size());
-        for (String v : isRelatedTo.get(isRelatedToKey)) {
+        for (String v : isRelatedToEntry.getValue()) {
           assertTrue(isRelatedToValues.contains(v));
         }
       }
 
       // RelatesTo
-      for (String relatesToKey : relatesTo.keySet()) {
-        String compoundValue =
-            EntityColumnPrefix.RELATES_TO.readResult(result, relatesToKey)
-                .toString();
+      for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo
+          .entrySet()) {
+        String compoundValue = EntityColumnPrefix.RELATES_TO
+            .readResult(result, relatesToEntry.getKey()).toString();
         // id3?id4?id5
         Set<String> relatesToValues =
             new HashSet<String>(
                 Separator.VALUES.splitEncoded(compoundValue));
-        assertEquals(relatesTo.get(relatesToKey).size(),
+        assertEquals(relatesTo.get(relatesToEntry.getKey()).size(),
             relatesToValues.size());
-        for (String v : relatesTo.get(relatesToKey)) {
+        for (String v : relatesToEntry.getValue()) {
           assertTrue(relatesToValues.contains(v));
         }
       }
 
       // Configuration
       Map<String, Object> configColumns =
-          EntityColumnPrefix.CONFIG.readResults(result, StringKeyConverter.getInstance());
+          EntityColumnPrefix.CONFIG.readResults(result,
+              StringKeyConverter.getInstance());
       assertEquals(conf, configColumns);
 
       NavigableMap<String, NavigableMap<Long, Number>> metricsResult =
@@ -1273,8 +1277,10 @@ public void testEventsWithEmptyInfo() throws IOException {
         assertTrue(info == null || info.isEmpty());
       }
     } finally {
-      hbi.stop();
-      hbi.close();
+      if (hbi != null) {
+        hbi.stop();
+        hbi.close();
+      }
     }
   }

View File

@@ -55,8 +55,8 @@ static TimelineEntity getEntityMetricsApp1(long insertTs, Configuration c1) {
     Map<Long, Number> metricValues = new HashMap<Long, Number>();
     long ts = insertTs;
-    for (int k=1; k< 100 ; k++) {
-      metricValues.put(ts - k*200000, 20L);
+    for (int k = 1; k < 100; k++) {
+      metricValues.put(ts - k * 200000L, 20L);
     }
     metricValues.put(ts - 80000, 40L);
     m1.setType(Type.TIME_SERIES);
@@ -68,7 +68,7 @@ static TimelineEntity getEntityMetricsApp1(long insertTs, Configuration c1) {
     metricValues = new HashMap<Long, Number>();
     ts = System.currentTimeMillis();
     for (int k=1; k< 100 ; k++) {
-      metricValues.put(ts - k*100000, 31L);
+      metricValues.put(ts - k*100000L, 31L);
     }
     metricValues.put(ts - 80000, 57L);
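
Note: in ts - k*100000 the multiplication happens in 32-bit int arithmetic and is only widened to long afterwards, which is findbugs' ICAST_INTEGER_MULTIPLY_CAST_TO_LONG pattern. With k below 100 the product here cannot actually overflow, so the L suffix is defensive rather than a live bug fix; a sketch of where it would bite:

class LongMultiplySketch {
  public static void main(String[] args) {
    // Harmless at this test's scale: 99 * 200000 fits comfortably in an int.
    // Once the factors grow, though, the int multiply silently wraps:
    System.out.println(20000 * 200000);    // -294967296 (wrapped in int)
    System.out.println(20000 * 200000L);   // 4000000000 (64-bit multiply)
  }
}
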

View File

@@ -148,7 +148,9 @@ public void testWriteFlowRunMinMax() throws Exception {
       // flush everything to hbase
       hbi.flush();
     } finally {
-      hbi.close();
+      if (hbi != null) {
+        hbi.close();
+      }
     }
 
     Connection conn = ConnectionFactory.createConnection(c1);
@@ -199,7 +201,9 @@ public void testWriteFlowRunMinMax() throws Exception {
         assertEquals(1, flowRuns.size());
       }
     } finally {
-      hbr.close();
+      if (hbr != null) {
+        hbr.close();
+      }
     }
   }
@@ -230,7 +234,9 @@ public void testWriteFlowActivityOneFlow() throws Exception {
       hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
       hbi.flush();
     } finally {
-      hbi.close();
+      if (hbi != null) {
+        hbi.close();
+      }
     }
     // check flow activity
     checkFlowActivityTable(cluster, user, flow, flowVersion, runid, c1,
@@ -260,7 +266,9 @@ public void testWriteFlowActivityOneFlow() throws Exception {
         }
       }
     } finally {
-      hbr.close();
+      if (hbr != null) {
+        hbr.close();
+      }
     }
   }
@@ -351,7 +359,9 @@ public void testFlowActivityTableOneFlowMultipleRunIds() throws IOException {
       hbi.flush();
     } finally {
-      hbi.close();
+      if (hbi != null) {
+        hbi.close();
+      }
     }
 
     // check flow activity
     checkFlowActivityTableSeveralRuns(cluster, user, flow, c1, flowVersion1,
@@ -396,7 +406,9 @@ public void testFlowActivityTableOneFlowMultipleRunIds() throws IOException {
         }
       }
     } finally {
-      hbr.close();
+      if (hbr != null) {
+        hbr.close();
+      }
     }
   }

View File

@@ -75,8 +75,8 @@ public class TestHBaseStorageFlowRun {
   private static HBaseTestingUtility util;
 
-  private final String metric1 = "MAP_SLOT_MILLIS";
-  private final String metric2 = "HDFS_BYTES_READ";
+  private static final String METRIC1 = "MAP_SLOT_MILLIS";
+  private static final String METRIC2 = "HDFS_BYTES_READ";
 
   @BeforeClass
   public static void setupBeforeClass() throws Exception {
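
Note: metric1 and metric2 were final instance fields holding compile-time constants, so findbugs (likely its SS_SHOULD_BE_STATIC pattern) suggests one shared static field instead of a copy per test instance; the rename to METRIC1/METRIC2 just follows the upper-case convention for constants. Either form is a constant variable in Java terms, which is why the case labels further down keep compiling. A condensed sketch:

class MetricNameSketch {
  // Before: private final String metric1 = "MAP_SLOT_MILLIS";
  // After: one field shared by every instance, named as a constant.
  private static final String METRIC1 = "MAP_SLOT_MILLIS";

  static String describe(String id) {
    switch (id) {
      case METRIC1:   // still a constant expression, so still a valid label
        return "map slot millis";
      default:
        return "unrecognized metric";
    }
  }

  public static void main(String[] args) {
    System.out.println(describe("MAP_SLOT_MILLIS"));
  }
}
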
@@ -213,7 +213,9 @@ public void testWriteFlowRunMinMax() throws Exception {
       // flush everything to hbase
       hbi.flush();
     } finally {
-      hbi.close();
+      if (hbi != null) {
+        hbi.close();
+      }
     }
 
     Connection conn = ConnectionFactory.createConnection(c1);
@@ -257,7 +259,9 @@ public void testWriteFlowRunMinMax() throws Exception {
       assertEquals(minStartTs, flowRun.getStartTime());
       assertEquals(endTs, flowRun.getMaxEndTime());
     } finally {
-      hbr.close();
+      if (hbr != null) {
+        hbr.close();
+      }
     }
   }
@@ -299,7 +303,9 @@ public void testWriteFlowRunMetricsOneFlow() throws Exception {
       hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
       hbi.flush();
     } finally {
-      hbi.close();
+      if (hbi != null) {
+        hbi.close();
+      }
     }
 
     // check flow run
@@ -327,10 +333,10 @@ public void testWriteFlowRunMetricsOneFlow() throws Exception {
           value = n;
         }
         switch (id) {
-        case metric1:
+        case METRIC1:
           assertEquals(141L, value);
           break;
-        case metric2:
+        case METRIC2:
           assertEquals(57L, value);
           break;
         default:
@@ -338,7 +344,9 @@ public void testWriteFlowRunMetricsOneFlow() throws Exception {
         }
       }
     } finally {
-      hbr.close();
+      if (hbr != null) {
+        hbr.close();
+      }
     }
   }
@@ -365,14 +373,14 @@ private void checkFlowRunTable(String cluster, String user, String flow,
       rowCount++;
       // check metric1
       byte[] q = ColumnHelper.getColumnQualifier(
-          FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), metric1);
+          FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), METRIC1);
       assertTrue(values.containsKey(q));
       assertEquals(141L, Bytes.toLong(values.get(q)));
 
       // check metric2
       assertEquals(3, values.size());
       q = ColumnHelper.getColumnQualifier(
-          FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), metric2);
+          FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), METRIC2);
       assertTrue(values.containsKey(q));
       assertEquals(57L, Bytes.toLong(values.get(q)));
     }
@@ -407,7 +415,9 @@ public void testWriteFlowRunMetricsPrefix() throws Exception {
       hbi.write(cluster, user, flow, flowVersion, 1002345678918L, appName, te);
       hbi.flush();
     } finally {
-      hbi.close();
+      if (hbi != null) {
+        hbi.close();
+      }
     }
 
     // use the timeline reader to verify data
@@ -418,7 +428,7 @@ public void testWriteFlowRunMetricsPrefix() throws Exception {
       hbr.start();
       TimelineFilterList metricsToRetrieve = new TimelineFilterList(
           Operator.OR, new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
-          metric1.substring(0, metric1.indexOf("_") + 1)));
+          METRIC1.substring(0, METRIC1.indexOf("_") + 1)));
       TimelineEntity entity = hbr.getEntity(
           new TimelineReaderContext(cluster, user, flow, 1002345678919L, null,
           TimelineEntityType.YARN_FLOW_RUN.toString(), null),
@@ -435,7 +445,7 @@ Operator.OR, new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
           value = n;
         }
         switch (id) {
-        case metric1:
+        case METRIC1:
           assertEquals(40L, value);
           break;
         default:
@@ -455,7 +465,9 @@ Operator.OR, new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
       }
       assertEquals(2, metricCnt);
     } finally {
-      hbr.close();
+      if (hbr != null) {
+        hbr.close();
+      }
     }
   }
@@ -488,7 +500,9 @@ public void testWriteFlowRunsMetricFields() throws Exception {
       hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
       hbi.flush();
     } finally {
-      hbi.close();
+      if (hbi != null) {
+        hbi.close();
+      }
     }
 
     // check flow run
@@ -528,10 +542,10 @@ public void testWriteFlowRunsMetricFields() throws Exception {
           value = n;
         }
         switch (id) {
-        case metric1:
+        case METRIC1:
           assertEquals(141L, value);
           break;
-        case metric2:
+        case METRIC2:
           assertEquals(57L, value);
           break;
         default:
@@ -540,7 +554,9 @@ public void testWriteFlowRunsMetricFields() throws Exception {
         }
       }
     } finally {
-      hbr.close();
+      if (hbr != null) {
+        hbr.close();
+      }
     }
   }
@@ -595,8 +611,10 @@ public void testWriteFlowRunFlush() throws Exception {
         }
       }
     } finally {
-      hbi.flush();
-      hbi.close();
+      if (hbi != null) {
+        hbi.flush();
+        hbi.close();
+      }
       checkMinMaxFlush(c1, minTS, startTs, count, cluster, user, flow, runid,
           true);
     }
@@ -665,7 +683,9 @@ public void testFilterFlowRunsByCreatedTime() throws Exception {
           "application_11111111111111_2222", te);
       hbi.flush();
     } finally {
-      hbi.close();
+      if (hbi != null) {
+        hbi.close();
+      }
     }
 
     // use the timeline reader to verify data
@@ -711,7 +731,9 @@ public void testFilterFlowRunsByCreatedTime() throws Exception {
         }
       }
     } finally {
-      hbr.close();
+      if (hbr != null) {
+        hbr.close();
+      }
     }
   }
@@ -742,7 +764,9 @@ public void testMetricFilters() throws Exception {
           "application_11111111111111_2222", te);
       hbi.flush();
     } finally {
-      hbi.close();
+      if (hbi != null) {
+        hbi.close();
+      }
     }
 
     // use the timeline reader to verify data
@@ -754,12 +778,12 @@ public void testMetricFilters() throws Exception {
       TimelineFilterList list1 = new TimelineFilterList();
       list1.addFilter(new TimelineCompareFilter(
-          TimelineCompareOp.GREATER_OR_EQUAL, metric1, 101));
+          TimelineCompareOp.GREATER_OR_EQUAL, METRIC1, 101));
       TimelineFilterList list2 = new TimelineFilterList();
       list2.addFilter(new TimelineCompareFilter(
-          TimelineCompareOp.LESS_THAN, metric1, 43));
+          TimelineCompareOp.LESS_THAN, METRIC1, 43));
       list2.addFilter(new TimelineCompareFilter(
-          TimelineCompareOp.EQUAL, metric2, 57));
+          TimelineCompareOp.EQUAL, METRIC2, 57));
       TimelineFilterList metricFilterList =
           new TimelineFilterList(Operator.OR, list1, list2);
       Set<TimelineEntity> entities = hbr.getEntities(
@@ -777,8 +801,8 @@ public void testMetricFilters() throws Exception {
       TimelineFilterList metricFilterList1 = new TimelineFilterList(
           new TimelineCompareFilter(
-              TimelineCompareOp.LESS_OR_EQUAL, metric1, 127),
-          new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, metric2, 30));
+              TimelineCompareOp.LESS_OR_EQUAL, METRIC1, 127),
+          new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, METRIC2, 30));
       entities = hbr.getEntities(
           new TimelineReaderContext(cluster, user, flow, null, null,
               TimelineEntityType.YARN_FLOW_RUN.toString(), null),
@@ -793,8 +817,8 @@ public void testMetricFilters() throws Exception {
       assertEquals(2, metricCnt);
 
       TimelineFilterList metricFilterList2 = new TimelineFilterList(
-          new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, metric1, 32),
-          new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, metric2, 57));
+          new TimelineCompareFilter(TimelineCompareOp.LESS_THAN, METRIC1, 32),
+          new TimelineCompareFilter(TimelineCompareOp.NOT_EQUAL, METRIC2, 57));
       entities = hbr.getEntities(
           new TimelineReaderContext(cluster, user, flow, null, null,
               TimelineEntityType.YARN_FLOW_RUN.toString(), null),
@@ -815,17 +839,17 @@ public void testMetricFilters() throws Exception {
       TimelineFilterList list3 = new TimelineFilterList();
       list3.addFilter(new TimelineCompareFilter(
-          TimelineCompareOp.GREATER_OR_EQUAL, metric1, 101));
+          TimelineCompareOp.GREATER_OR_EQUAL, METRIC1, 101));
       TimelineFilterList list4 = new TimelineFilterList();
       list4.addFilter(new TimelineCompareFilter(
-          TimelineCompareOp.LESS_THAN, metric1, 43));
+          TimelineCompareOp.LESS_THAN, METRIC1, 43));
       list4.addFilter(new TimelineCompareFilter(
-          TimelineCompareOp.EQUAL, metric2, 57));
+          TimelineCompareOp.EQUAL, METRIC2, 57));
       TimelineFilterList metricFilterList4 =
           new TimelineFilterList(Operator.OR, list3, list4);
       TimelineFilterList metricsToRetrieve = new TimelineFilterList(Operator.OR,
           new TimelinePrefixFilter(TimelineCompareOp.EQUAL,
-              metric2.substring(0, metric2.indexOf("_") + 1)));
+              METRIC2.substring(0, METRIC2.indexOf("_") + 1)));
       entities = hbr.getEntities(
           new TimelineReaderContext(cluster, user, flow, null, null,
               TimelineEntityType.YARN_FLOW_RUN.toString(), null),
@@ -840,7 +864,9 @@ public void testMetricFilters() throws Exception {
       }
       assertEquals(1, metricCnt);
     } finally {
-      hbr.close();
+      if (hbr != null) {
+        hbr.close();
+      }
     }
   }

View File

@@ -69,8 +69,8 @@ public class TestHBaseStorageFlowRunCompaction {
   private static HBaseTestingUtility util;
 
-  private final String metric1 = "MAP_SLOT_MILLIS";
-  private final String metric2 = "HDFS_BYTES_READ";
+  private static final String metric1 = "MAP_SLOT_MILLIS";
+  private static final String metric2 = "HDFS_BYTES_READ";
 
   private final byte[] aRowKey = Bytes.toBytes("a");
   private final byte[] aFamily = Bytes.toBytes("family");
@@ -166,9 +166,11 @@ public void testWriteFlowRunCompaction() throws Exception {
       entityApp1 = TestFlowDataGenerator.getEntityMetricsApp1Complete(
           insertTs + 1, c1);
       te1.addEntity(entityApp1);
-      hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
-      hbi.flush();
-      hbi.close();
+      if (hbi != null) {
+        hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
+        hbi.flush();
+        hbi.close();
+      }
     }
 
     // check in flow run table