(cherry picked from commit 1b29c9bfee
)
Co-authored-by: Masatake Iwasaki <iwasakims@apache.org>
This commit is contained in:
parent
bc5458bbd4
commit
71bda1a2e8
@ -16,8 +16,8 @@
|
|||||||
-->
|
-->
|
||||||
<FindBugsFilter>
|
<FindBugsFilter>
|
||||||
<Match>
|
<Match>
|
||||||
<Class name="org.apache.hadoop.fs.cosn.CosNInputStream.ReadBuffer"/>
|
<Class name="org.apache.hadoop.fs.cosn.CosNInputStream$ReadBuffer"/>
|
||||||
<Method name="getBuffer"/>
|
<Method name="getBuffer"/>
|
||||||
<Bug pattern="EI_EXPOSE_REP"/>
|
<Bug pattern="EI_EXPOSE_REP"/>
|
||||||
</Match>
|
</Match>
|
||||||
</FindBugsFilter>
|
</FindBugsFilter>
|
||||||
|
@ -3728,7 +3728,7 @@ void incrUserConnections(String user) {
|
|||||||
if (count == null) {
|
if (count == null) {
|
||||||
count = 1;
|
count = 1;
|
||||||
} else {
|
} else {
|
||||||
count++;
|
count = count + 1;
|
||||||
}
|
}
|
||||||
userToConnectionsMap.put(user, count);
|
userToConnectionsMap.put(user, count);
|
||||||
}
|
}
|
||||||
@ -3740,7 +3740,7 @@ void decrUserConnections(String user) {
|
|||||||
if (count == null) {
|
if (count == null) {
|
||||||
return;
|
return;
|
||||||
} else {
|
} else {
|
||||||
count--;
|
count = count - 1;
|
||||||
}
|
}
|
||||||
if (count == 0) {
|
if (count == 0) {
|
||||||
userToConnectionsMap.remove(user);
|
userToConnectionsMap.remove(user);
|
||||||
|
@ -354,23 +354,29 @@ private class ResultHandler
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void onSuccess(@Nonnull VolumeCheckResult result) {
|
public void onSuccess(VolumeCheckResult result) {
|
||||||
switch(result) {
|
if (result == null) {
|
||||||
case HEALTHY:
|
LOG.error("Unexpected health check result null for volume {}",
|
||||||
case DEGRADED:
|
|
||||||
LOG.debug("Volume {} is {}.", reference.getVolume(), result);
|
|
||||||
markHealthy();
|
|
||||||
break;
|
|
||||||
case FAILED:
|
|
||||||
LOG.warn("Volume {} detected as being unhealthy",
|
|
||||||
reference.getVolume());
|
reference.getVolume());
|
||||||
markFailed();
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
LOG.error("Unexpected health check result {} for volume {}",
|
|
||||||
result, reference.getVolume());
|
|
||||||
markHealthy();
|
markHealthy();
|
||||||
break;
|
} else {
|
||||||
|
switch(result) {
|
||||||
|
case HEALTHY:
|
||||||
|
case DEGRADED:
|
||||||
|
LOG.debug("Volume {} is {}.", reference.getVolume(), result);
|
||||||
|
markHealthy();
|
||||||
|
break;
|
||||||
|
case FAILED:
|
||||||
|
LOG.warn("Volume {} detected as being unhealthy",
|
||||||
|
reference.getVolume());
|
||||||
|
markFailed();
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
LOG.error("Unexpected health check result {} for volume {}",
|
||||||
|
result, reference.getVolume());
|
||||||
|
markHealthy();
|
||||||
|
break;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
cleanup();
|
cleanup();
|
||||||
}
|
}
|
||||||
|
@ -166,7 +166,7 @@ private void addResultCachingCallback(
|
|||||||
Checkable<K, V> target, ListenableFuture<V> lf) {
|
Checkable<K, V> target, ListenableFuture<V> lf) {
|
||||||
Futures.addCallback(lf, new FutureCallback<V>() {
|
Futures.addCallback(lf, new FutureCallback<V>() {
|
||||||
@Override
|
@Override
|
||||||
public void onSuccess(@Nullable V result) {
|
public void onSuccess(V result) {
|
||||||
synchronized (ThrottledAsyncChecker.this) {
|
synchronized (ThrottledAsyncChecker.this) {
|
||||||
checksInProgress.remove(target);
|
checksInProgress.remove(target);
|
||||||
completedChecks.put(target, new LastCheckResult<>(
|
completedChecks.put(target, new LastCheckResult<>(
|
||||||
|
@ -1238,7 +1238,7 @@ private void incrOpCount(FSEditLogOpCodes opCode,
|
|||||||
holder = new Holder<Integer>(1);
|
holder = new Holder<Integer>(1);
|
||||||
opCounts.put(opCode, holder);
|
opCounts.put(opCode, holder);
|
||||||
} else {
|
} else {
|
||||||
holder.held++;
|
holder.held = holder.held + 1;
|
||||||
}
|
}
|
||||||
counter.increment();
|
counter.increment();
|
||||||
}
|
}
|
||||||
|
@ -533,5 +533,17 @@
|
|||||||
<Class name="org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage$1" />
|
<Class name="org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage$1" />
|
||||||
<Bug pattern="SE_BAD_FIELD_INNER_CLASS" />
|
<Bug pattern="SE_BAD_FIELD_INNER_CLASS" />
|
||||||
</Match>
|
</Match>
|
||||||
|
|
||||||
|
<!--
|
||||||
|
HADOOP-17138: Suppress warnings about unchecked Nullable
|
||||||
|
since the method catches NullPointerException then registerError.
|
||||||
|
-->
|
||||||
|
<Match>
|
||||||
|
<Or>
|
||||||
|
<Class name="org.apache.hadoop.mapred.LocatedFileStatusFetcher$ProcessInputDirCallback" />
|
||||||
|
<Class name="org.apache.hadoop.mapred.LocatedFileStatusFetcher$ProcessInitialInputPathCallback" />
|
||||||
|
</Or>
|
||||||
|
<Method name="onSuccess" />
|
||||||
|
<Bug pattern="NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE" />
|
||||||
|
</Match>
|
||||||
</FindBugsFilter>
|
</FindBugsFilter>
|
||||||
|
@ -813,7 +813,7 @@ private void increaseQueueAppNum(String queue) throws YarnException {
|
|||||||
if (appNum == null) {
|
if (appNum == null) {
|
||||||
appNum = 1;
|
appNum = 1;
|
||||||
} else {
|
} else {
|
||||||
appNum++;
|
appNum = appNum + 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
queueAppNumMap.put(queueName, appNum);
|
queueAppNumMap.put(queueName, appNum);
|
||||||
|
@ -705,4 +705,10 @@
|
|||||||
<Method name="getDevices" />
|
<Method name="getDevices" />
|
||||||
<Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
|
<Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
|
||||||
</Match>
|
</Match>
|
||||||
|
|
||||||
|
<!-- Suppress warning about anonymous class for mocking. -->
|
||||||
|
<Match>
|
||||||
|
<Class name="~org\.apache\.hadoop\.yarn\.server\.timelineservice\.reader\.TestTimelineReaderWebServicesHBaseStorage.*" />
|
||||||
|
<Bug pattern="UMAC_UNCALLABLE_METHOD_OF_ANONYMOUS_CLASS" />
|
||||||
|
</Match>
|
||||||
</FindBugsFilter>
|
</FindBugsFilter>
|
||||||
|
@ -181,14 +181,13 @@ private static void waitForHBaseDown(HBaseTimelineReaderImpl htr) throws
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void checkQuery(HBaseTimelineReaderImpl htr) throws
|
private static Set<TimelineEntity> checkQuery(HBaseTimelineReaderImpl htr)
|
||||||
IOException {
|
throws IOException {
|
||||||
TimelineReaderContext context =
|
TimelineReaderContext context =
|
||||||
new TimelineReaderContext(YarnConfiguration.DEFAULT_RM_CLUSTER_ID,
|
new TimelineReaderContext(YarnConfiguration.DEFAULT_RM_CLUSTER_ID,
|
||||||
null, null, null, null, TimelineEntityType
|
null, null, null, null, TimelineEntityType
|
||||||
.YARN_FLOW_ACTIVITY.toString(), null, null);
|
.YARN_FLOW_ACTIVITY.toString(), null, null);
|
||||||
Set<TimelineEntity> entities = htr.getEntities(context, MONITOR_FILTERS,
|
return htr.getEntities(context, MONITOR_FILTERS, DATA_TO_RETRIEVE);
|
||||||
DATA_TO_RETRIEVE);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void configure(HBaseTestingUtility util) {
|
private static void configure(HBaseTestingUtility util) {
|
||||||
|
Loading…
Reference in New Issue
Block a user