From 6cf6ab7b780de2b0c2c9ea730e1f366965a0d682 Mon Sep 17 00:00:00 2001
From: Sangjin Lee
Date: Sun, 10 Jul 2016 08:38:19 -0700
Subject: [PATCH] Made a number of miscellaneous fixes for javac, javadoc, and
checkstyle warnings.
---
.../jobhistory/JobHistoryEventHandler.java | 4 -
.../v2/app/rm/RMContainerAllocator.java | 2 +-
.../TestJobHistoryEventHandler.java | 3 +-
.../mapreduce/jobhistory/HistoryEvent.java | 12 +-
.../mapred/TestMRTimelineEventHandling.java | 24 ++-
.../apache/hadoop/mapred/UtilsForTests.java | 4 +-
.../hadoop/mapreduce/EntityWriterV2.java | 3 +-
.../JobHistoryFileReplayMapperV2.java | 4 +-
.../SimpleEntityWriterConstants.java | 14 +-
.../mapreduce/SimpleEntityWriterV1.java | 6 +-
.../mapreduce/TimelineEntityConverterV1.java | 10 +-
.../mapreduce/TimelineEntityConverterV2.java | 11 +-
.../mapreduce/TimelineServicePerformance.java | 3 +-
.../mapreduce/v2/MiniMRYarnCluster.java | 5 +-
.../records/timelineservice/package-info.java | 1 -
.../hadoop/yarn/conf/YarnConfiguration.java | 7 +-
.../yarn/util/TimelineServiceHelper.java | 2 +
.../distributedshell/ApplicationMaster.java | 27 ++-
.../TestDistributedShell.java | 23 ++-
.../TestDistributedShellWithNodeLabels.java | 2 -
.../hadoop/yarn/client/api/AMRMClient.java | 10 +-
.../client/api/async/AMRMClientAsync.java | 4 +-
.../yarn/client/api/TimelineClient.java | 34 ++--
.../client/api/impl/TimelineClientImpl.java | 42 ++--
.../TestTimelineServiceRecords.java | 15 +-
.../api/impl/TestTimelineClientV2Impl.java | 37 ++--
.../yarn/util/TestTimelineServiceHelper.java | 21 +-
.../api/CollectorNodemanagerProtocol.java | 12 +-
.../api/CollectorNodemanagerProtocolPB.java | 3 +-
...lectorNodemanagerProtocolPBClientImpl.java | 4 +-
...ectorNodemanagerProtocolPBServiceImpl.java | 6 +-
...TimelineCollectorContextRequestPBImpl.java | 25 ++-
...imelineCollectorContextResponsePBImpl.java | 24 ++-
.../impl/pb/NodeHeartbeatRequestPBImpl.java | 9 +-
.../impl/pb/NodeHeartbeatResponsePBImpl.java | 6 +-
.../ReportNewCollectorInfoRequestPBImpl.java | 16 +-
.../ReportNewCollectorInfoResponsePBImpl.java | 12 +-
.../impl/pb/AppCollectorsMapPBImpl.java | 17 +-
.../java/org/apache/hadoop/yarn/TestRPC.java | 55 +++--
.../yarn/server/nodemanager/NodeManager.java | 4 +-
.../nodemanager/NodeStatusUpdaterImpl.java | 20 +-
.../ContainerManagerImpl.java | 8 +-
.../ApplicationContainerFinishedEvent.java | 3 +-
.../container/ContainerImpl.java | 2 -
.../monitor/ContainersMonitorImpl.java | 5 +-
.../timelineservice/NMTimelinePublisher.java | 11 +-
.../nodemanager/TestNodeStatusUpdater.java | 5 +-
.../application/TestApplication.java | 5 +-
.../launcher/TestContainerLaunch.java | 22 +-
.../TestNMTimelinePublisher.java | 4 +-
.../server/nodemanager/webapp/MockApp.java | 8 +-
.../server/resourcemanager/RMContextImpl.java | 3 +-
.../metrics/TimelineServiceV1Publisher.java | 2 +-
.../TestSystemMetricsPublisherForV2.java | 29 +--
.../TestTimelineServiceClientIntegration.java | 9 +-
...TimelineReaderWebServicesHBaseStorage.java | 51 ++---
.../storage/TestHBaseTimelineStorage.java | 192 +++++++++---------
...stPhoenixOfflineAggregationWriterImpl.java | 4 +-
.../storage/flow/TestFlowDataGenerator.java | 39 ++--
.../flow/TestHBaseStorageFlowActivity.java | 13 +-
.../storage/flow/TestHBaseStorageFlowRun.java | 10 +-
.../TestHBaseStorageFlowRunCompaction.java | 33 +--
.../TimelineCollectorWebService.java | 3 +-
.../storage/application/package-info.java | 1 -
.../storage/common/AppIdKeyConverter.java | 3 +-
.../storage/common/TimestampGenerator.java | 3 +-
.../storage/entity/package-info.java | 1 -
.../TestNMTimelineCollectorManager.java | 24 +--
...stPerNodeTimelineCollectorsAuxService.java | 16 +-
.../reader/TestTimelineReaderUtils.java | 6 +-
.../reader/TestTimelineReaderWebServices.java | 11 +-
.../TestTimelineReaderWebServicesUtils.java | 4 +-
.../reader/TestTimelineUIDConverter.java | 2 +-
.../TestFileSystemTimelineReaderImpl.java | 44 ++--
.../TestFileSystemTimelineWriterImpl.java | 7 +-
.../storage/common/TestRowKeys.java | 22 +-
.../storage/common/TestSeparator.java | 28 +--
.../src/site/markdown/TimelineServiceV2.md | 1 -
78 files changed, 634 insertions(+), 543 deletions(-)
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index 9b59676641..817cd14593 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -46,7 +46,6 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TaskStatus;
import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -76,11 +75,8 @@
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.JsonNodeFactory;
-import org.codehaus.jackson.node.ObjectNode;
import com.google.common.annotations.VisibleForTesting;
import com.sun.jersey.api.client.ClientHandlerException;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
index ea2046b319..217337ef6f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
@@ -829,7 +829,7 @@ private List getResources() throws Exception {
if (collectorAddr != null && !collectorAddr.isEmpty()
&& appContext.getTimelineClient() != null) {
appContext.getTimelineClient().setTimelineServiceAddress(
- response.getCollectorAddr());
+ response.getCollectorAddr());
}
for (ContainerStatus cont : finishedContainers) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
index a84e6d21e6..064f9ec29f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
@@ -786,7 +786,8 @@ public TestParams() {
public TestParams(boolean isLastAMRetry) {
this(AppContext.class, isLastAMRetry);
}
- public TestParams(Class<? extends AppContext> contextClass, boolean isLastAMRetry) {
+ public TestParams(Class<? extends AppContext> contextClass,
+ boolean isLastAMRetry) {
this.isLastAMRetry = isLastAMRetry;
mockAppContext = mockAppContext(contextClass, appId, this.isLastAMRetry);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java
index 1d59ebef89..1ba7195ec2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java
@@ -42,9 +42,17 @@ public interface HistoryEvent {
/** Set the Avro datum wrapped by this. */
void setDatum(Object datum);
- /** Map HistoryEvent to TimelineEvent */
+ /**
+ * Map HistoryEvent to TimelineEvent.
+ *
+ * @return the timeline event
+ */
TimelineEvent toTimelineEvent();
- /** Counters or Metrics if any else return null. */
+ /**
+ * Counters or Metrics if any else return null.
+ *
+ * @return the set of timeline metrics
+ */
Set<TimelineMetric> getTimelineMetrics();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
index 6b9f27e2aa..90748a9002 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
@@ -64,7 +64,7 @@ public class TestMRTimelineEventHandling {
private static final String TIMELINE_AUX_SERVICE_NAME = "timeline_collector";
private static final Log LOG =
- LogFactory.getLog(TestMRTimelineEventHandling.class);
+ LogFactory.getLog(TestMRTimelineEventHandling.class);
@Test
public void testTimelineServiceStartInMiniCluster() throws Exception {
@@ -169,6 +169,7 @@ public void testMRTimelineEventHandling() throws Exception {
}
}
+ @SuppressWarnings("deprecation")
@Test
public void testMRNewTimelineServiceEventHandling() throws Exception {
LOG.info("testMRNewTimelineServiceEventHandling start.");
@@ -183,7 +184,7 @@ public void testMRNewTimelineServiceEventHandling() throws Exception {
// enable aux-service based timeline collectors
conf.set(YarnConfiguration.NM_AUX_SERVICES, TIMELINE_AUX_SERVICE_NAME);
conf.set(YarnConfiguration.NM_AUX_SERVICES + "." + TIMELINE_AUX_SERVICE_NAME
- + ".class", PerNodeTimelineCollectorsAuxService.class.getName());
+ + ".class", PerNodeTimelineCollectorsAuxService.class.getName());
conf.setBoolean(YarnConfiguration.SYSTEM_METRICS_PUBLISHER_ENABLED, true);
@@ -245,7 +246,8 @@ public void testMRNewTimelineServiceEventHandling() throws Exception {
}
// Cleanup test file
String testRoot =
- FileSystemTimelineWriterImpl.DEFAULT_TIMELINE_SERVICE_STORAGE_DIR_ROOT;
+ FileSystemTimelineWriterImpl.
+ DEFAULT_TIMELINE_SERVICE_STORAGE_DIR_ROOT;
File testRootFolder = new File(testRoot);
if(testRootFolder.isDirectory()) {
FileUtils.deleteDirectory(testRootFolder);
@@ -320,8 +322,10 @@ private void checkNewTimelineEvent(ApplicationId appId,
" does not exist.",
taskFolder.isDirectory());
- String taskEventFileName = appId.toString().replaceAll("application", "task")
- + "_m_000000" + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
+ String taskEventFileName =
+ appId.toString().replaceAll("application", "task") +
+ "_m_000000" +
+ FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
String taskEventFilePath = outputDirTask + taskEventFileName;
File taskEventFile = new File(taskEventFilePath);
@@ -372,10 +376,12 @@ private void verifyEntity(File entityFile, String eventId,
reader = new BufferedReader(new FileReader(entityFile));
while ((strLine = reader.readLine()) != null) {
if (strLine.trim().length() > 0) {
- org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
- FileSystemTimelineReaderImpl.getTimelineRecordFromJSON(
- strLine.trim(),
- org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.class);
+ org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+ entity =
+ FileSystemTimelineReaderImpl.getTimelineRecordFromJSON(
+ strLine.trim(),
+ org.apache.hadoop.yarn.api.records.timelineservice.
+ TimelineEntity.class);
if (eventId == null) {
// Job metrics are published without any events for
// ApplicationEntity. There is also possibility that some other
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
index 2fb6828e92..935c175e70 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
@@ -621,8 +621,8 @@ static RunningJob runJob(JobConf conf, Path inDir, Path outDir, int numMaps,
return job;
}
- public static void waitForAppFinished(RunningJob job, MiniMRYarnCluster cluster)
- throws IOException {
+ public static void waitForAppFinished(RunningJob job,
+ MiniMRYarnCluster cluster) throws IOException {
ApplicationId appId = ApplicationId.newInstance(
Long.parseLong(job.getID().getJtIdentifier()), job.getID().getId());
ConcurrentMap<ApplicationId, RMApp> rmApps =
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java
index f5d95c3a40..74d7b9476e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java
@@ -33,7 +33,8 @@
* to the timeline service.
*/
abstract class EntityWriterV2
- extends org.apache.hadoop.mapreduce.Mapper {
+ extends org.apache.hadoop.mapreduce.Mapper
+ {
@Override
public void map(IntWritable key, IntWritable val, Context context)
throws IOException {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java
index 6a9a878c73..2ec48336e7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java
@@ -89,8 +89,8 @@ protected void writeEntities(Configuration tlConf,
parser.parseHistoryFile(job.getJobHistoryFilePath());
Configuration jobConf =
parser.parseConfiguration(job.getJobConfFilePath());
- LOG.info("parsed the job history file and the configuration file for job"
- + jobIdStr);
+ LOG.info("parsed the job history file and the configuration file " +
+ "for job " + jobIdStr);
// set the context
// flow id: job name, flow run id: timestamp, user id
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java
index b89d0e879b..d96ad7615a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java
@@ -32,12 +32,12 @@ interface SimpleEntityWriterConstants {
/**
* To ensure that the compression really gets exercised, generate a
- * random alphanumeric fixed length payload
+ * random alphanumeric fixed length payload.
*/
- char[] ALPHA_NUMS = new char[] { 'a', 'b', 'c', 'd', 'e', 'f',
- 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r',
- 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D',
- 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',
- 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2',
- '3', '4', '5', '6', '7', '8', '9', '0', ' ' };
+ char[] ALPHA_NUMS = new char[] {'a', 'b', 'c', 'd', 'e', 'f',
+ 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r',
+ 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D',
+ 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',
+ 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2',
+ '3', '4', '5', '6', '7', '8', '9', '0', ' '};
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
index b10ae049f8..16d14a18c8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
@@ -39,11 +39,13 @@
* configuration.
*/
class SimpleEntityWriterV1
- extends org.apache.hadoop.mapreduce.Mapper
+ extends org.apache.hadoop.mapreduce.Mapper
+
implements SimpleEntityWriterConstants {
private static final Log LOG = LogFactory.getLog(SimpleEntityWriterV1.class);
- public void map(IntWritable key, IntWritable val, Context context) throws IOException {
+ public void map(IntWritable key, IntWritable val, Context context)
+ throws IOException {
TimelineClient tlc = new TimelineClientImpl();
Configuration conf = context.getConfiguration();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java
index 4d8b74bd7b..dcc3ce014b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java
@@ -90,9 +90,10 @@ private TimelineEntity createJobEntity(JobInfo jobInfo, Configuration conf) {
return job;
}
- private Set createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
+ private Set
+ createTaskAndTaskAttemptEntities(JobInfo jobInfo) {
Set entities = new HashSet<>();
- Map taskInfoMap = jobInfo.getAllTasks();
+ Map taskInfoMap = jobInfo.getAllTasks();
LOG.info("job " + jobInfo.getJobId()+ " has " + taskInfoMap.size() +
" tasks");
for (TaskInfo taskInfo: taskInfoMap.values()) {
@@ -124,7 +125,7 @@ private TimelineEntity createTaskEntity(TaskInfo taskInfo) {
private Set createTaskAttemptEntities(TaskInfo taskInfo) {
Set taskAttempts = new HashSet();
- Map taskAttemptInfoMap =
+ Map taskAttemptInfoMap =
taskInfo.getAllTaskAttempts();
LOG.info("task " + taskInfo.getTaskId() + " has " +
taskAttemptInfoMap.size() + " task attempts");
@@ -135,7 +136,8 @@ private Set createTaskAttemptEntities(TaskInfo taskInfo) {
return taskAttempts;
}
- private TimelineEntity createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) {
+ private TimelineEntity
+ createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) {
TimelineEntity taskAttempt = new TimelineEntity();
taskAttempt.setEntityType(TASK_ATTEMPT);
taskAttempt.setEntityId(taskAttemptInfo.getAttemptId().toString());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java
index 79633d2835..45812fe7da 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java
@@ -27,11 +27,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.CounterGroup;
-import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
@@ -109,7 +104,7 @@ private TimelineEntity createJobEntity(JobInfo jobInfo, Configuration conf) {
}
private void addConfiguration(TimelineEntity job, Configuration conf) {
- for (Map.Entry e: conf) {
+ for (Map.Entry e: conf) {
job.addConfig(e.getKey(), e.getValue());
}
}
@@ -130,7 +125,7 @@ private void addMetrics(TimelineEntity entity, Counters counters) {
private List createTaskAndTaskAttemptEntities(
JobInfo jobInfo) {
List entities = new ArrayList<>();
- Map taskInfoMap = jobInfo.getAllTasks();
+ Map taskInfoMap = jobInfo.getAllTasks();
LOG.info("job " + jobInfo.getJobId()+ " has " + taskInfoMap.size() +
" tasks");
for (TaskInfo taskInfo: taskInfoMap.values()) {
@@ -167,7 +162,7 @@ private TimelineEntity createTaskEntity(TaskInfo taskInfo) {
private Set createTaskAttemptEntities(TaskInfo taskInfo) {
Set taskAttempts = new HashSet();
- Map taskAttemptInfoMap =
+ Map taskAttemptInfoMap =
taskInfo.getAllTaskAttempts();
LOG.info("task " + taskInfo.getTaskId() + " has " +
taskAttemptInfoMap.size() + " task attempts");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java
index 1a14137bd4..7fa0444840 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java
@@ -137,7 +137,8 @@ public static boolean parseArgs(String[] args, Job job) throws IOException {
default:
// use the current timestamp as the "run id" of the test: this will
// be used as simulating the cluster timestamp for apps
- conf.setLong(SimpleEntityWriterConstants.TIMELINE_SERVICE_PERFORMANCE_RUN_ID,
+ conf.setLong(
+ SimpleEntityWriterConstants.TIMELINE_SERVICE_PERFORMANCE_RUN_ID,
System.currentTimeMillis());
switch (timeline_service_version) {
case TIMELINE_SERVICE_VERSION_2:
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
index edb825de33..2d3d6ed9e4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
@@ -181,10 +181,11 @@ public void serviceInit(Configuration conf) throws Exception {
}
if (enableTimelineAuxService) {
conf.setStrings(YarnConfiguration.NM_AUX_SERVICES,
- new String[] { ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID, TIMELINE_AUX_SERVICE_NAME });
+ new String[] {ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID,
+ TIMELINE_AUX_SERVICE_NAME});
} else {
conf.setStrings(YarnConfiguration.NM_AUX_SERVICES,
- new String[] { ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID });
+ new String[] {ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID});
}
conf.setClass(String.format(YarnConfiguration.NM_AUX_SERVICE_FMT,
ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID), ShuffleHandler.class,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
index c43bd62466..43805c8c98 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java
@@ -24,4 +24,3 @@
package org.apache.hadoop.yarn.api.records.timelineservice;
import org.apache.hadoop.classification.InterfaceAudience;
-
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 3787ff6364..3bb73f51fa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -500,7 +500,8 @@ public static boolean isAclEnabled(Configuration conf) {
/**
* The setting that controls whether yarn system metrics is published on the
- * timeline server or not by RM and NM. This configuration setting is for ATS V2
+ * timeline server or not by RM and NM. This configuration setting is for
+ * ATS v2.
*/
public static final String SYSTEM_METRICS_PUBLISHER_ENABLED = YARN_PREFIX
+ "system-metrics-publisher.enabled";
@@ -840,7 +841,7 @@ public static boolean isAclEnabled(Configuration conf) {
/** Number of threads container manager uses.*/
public static final String NM_COLLECTOR_SERVICE_THREAD_COUNT =
- NM_PREFIX + "collector-service.thread-count";
+ NM_PREFIX + "collector-service.thread-count";
public static final int DEFAULT_NM_COLLECTOR_SERVICE_THREAD_COUNT = 5;
/** Number of threads used in cleanup.*/
@@ -872,7 +873,7 @@ public static boolean isAclEnabled(Configuration conf) {
/** Address where the collector service IPC is.*/
public static final String NM_COLLECTOR_SERVICE_ADDRESS =
- NM_PREFIX + "collector-service.address";
+ NM_PREFIX + "collector-service.address";
public static final int DEFAULT_NM_COLLECTOR_SERVICE_PORT = 8048;
public static final String DEFAULT_NM_COLLECTOR_SERVICE_ADDRESS =
"0.0.0.0:" + DEFAULT_NM_LOCALIZER_PORT;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java
index ff6ebbd712..e0268a67b8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java
@@ -36,6 +36,8 @@ private TimelineServiceHelper() {
/**
* Cast map to HashMap for generic type.
* @param originalMap the map need to be casted
+ * @param <E> key type
+ * @param <V> value type
* @return casted HashMap object
*/
public static <E, V> HashMap<E, V> mapCastToHashMap(
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
index 907d09ecb8..b9949e1aed 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
@@ -102,7 +102,6 @@
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
-import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import org.apache.log4j.LogManager;
@@ -747,7 +746,7 @@ protected boolean finish() {
DSEvent.DS_APP_ATTEMPT_END);
} else {
publishApplicationAttemptEvent(timelineClient, appAttemptID.toString(),
- DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi);
+ DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi);
}
}
@@ -858,7 +857,7 @@ public void onContainersCompleted(List completedContainers) {
publishContainerEndEventOnTimelineServiceV2(containerStatus);
} else {
publishContainerEndEvent(
- timelineClient, containerStatus, domainId, appSubmitterUgi);
+ timelineClient, containerStatus, domainId, appSubmitterUgi);
}
}
}
@@ -988,8 +987,8 @@ public void onContainerStarted(ContainerId containerId,
container);
} else {
applicationMaster.publishContainerStartEvent(
- applicationMaster.timelineClient, container,
- applicationMaster.domainId, applicationMaster.appSubmitterUgi);
+ applicationMaster.timelineClient, container,
+ applicationMaster.domainId, applicationMaster.appSubmitterUgi);
}
}
}
@@ -1348,8 +1347,10 @@ Thread createLaunchContainerThread(Container allocatedContainer,
private void publishContainerStartEventOnTimelineServiceV2(
Container container) {
- final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
- new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity();
+ final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+ entity =
+ new org.apache.hadoop.yarn.api.records.timelineservice.
+ TimelineEntity();
entity.setId(container.getId().toString());
entity.setType(DSEntity.DS_CONTAINER.toString());
long ts = System.currentTimeMillis();
@@ -1381,8 +1382,10 @@ public TimelinePutResponse run() throws Exception {
private void publishContainerEndEventOnTimelineServiceV2(
final ContainerStatus container) {
- final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
- new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity();
+ final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+ entity =
+ new org.apache.hadoop.yarn.api.records.timelineservice.
+ TimelineEntity();
entity.setId(container.getContainerId().toString());
entity.setType(DSEntity.DS_CONTAINER.toString());
//entity.setDomainId(domainId);
@@ -1412,8 +1415,10 @@ public TimelinePutResponse run() throws Exception {
private void publishApplicationAttemptEventOnTimelineServiceV2(
DSEvent appEvent) {
- final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
- new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity();
+ final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+ entity =
+ new org.apache.hadoop.yarn.api.records.timelineservice.
+ TimelineEntity();
entity.setId(appAttemptID.toString());
entity.setType(DSEntity.DS_APP_ATTEMPT.toString());
long ts = System.currentTimeMillis();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
index a01d21b611..b9b8c7f79b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
@@ -153,7 +153,8 @@ private void setupInternal(int numNodeManager, float timelineVersion)
ProcfsBasedProcessTree.class.getName());
conf.setBoolean(YarnConfiguration.NM_PMEM_CHECK_ENABLED, true);
conf.setBoolean(YarnConfiguration.NM_VMEM_CHECK_ENABLED, true);
- conf.setBoolean(YarnConfiguration.YARN_MINICLUSTER_CONTROL_RESOURCE_MONITORING,
+ conf.setBoolean(
+ YarnConfiguration.YARN_MINICLUSTER_CONTROL_RESOURCE_MONITORING,
true);
conf.setBoolean(YarnConfiguration.RM_SYSTEM_METRICS_PUBLISHER_ENABLED,
true);
@@ -180,11 +181,13 @@ private void setupInternal(int numNodeManager, float timelineVersion)
// disable v1 timeline server since we no longer have a server here
// enable aux-service based timeline aggregators
conf.set(YarnConfiguration.NM_AUX_SERVICES, TIMELINE_AUX_SERVICE_NAME);
- conf.set(YarnConfiguration.NM_AUX_SERVICES + "." + TIMELINE_AUX_SERVICE_NAME
- + ".class", PerNodeTimelineCollectorsAuxService.class.getName());
+ conf.set(YarnConfiguration.NM_AUX_SERVICES + "." +
+ TIMELINE_AUX_SERVICE_NAME + ".class",
+ PerNodeTimelineCollectorsAuxService.class.getName());
conf.setClass(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
FileSystemTimelineWriterImpl.class,
- org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter.class);
+ org.apache.hadoop.yarn.server.timelineservice.storage.
+ TimelineWriter.class);
} else {
Assert.fail("Wrong timeline version number: " + timelineVersion);
}
@@ -395,7 +398,8 @@ public void run() {
}
if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED
- && appReport.getFinalApplicationStatus() != FinalApplicationStatus.UNDEFINED) {
+ && appReport.getFinalApplicationStatus() !=
+ FinalApplicationStatus.UNDEFINED) {
break;
}
}
@@ -431,7 +435,7 @@ public void run() {
}
private void checkTimelineV1(boolean haveDomain) throws Exception {
- TimelineDomain domain = null;
+ TimelineDomain domain = null;
if (haveDomain) {
domain = yarnCluster.getApplicationHistoryServer()
.getTimelineStore().getDomain("TEST_DOMAIN");
@@ -545,7 +549,7 @@ private void checkTimelineV2(boolean haveDomain, ApplicationId appId,
if (numOfContainerFinishedOccurences > 0) {
break;
} else {
- Thread.sleep(500l);
+ Thread.sleep(500L);
}
}
Assert.assertEquals(
@@ -577,7 +581,7 @@ private void checkTimelineV2(boolean haveDomain, ApplicationId appId,
if (numOfStringOccurences > 0) {
break;
} else {
- Thread.sleep(500l);
+ Thread.sleep(500L);
}
}
Assert.assertEquals(
@@ -631,8 +635,9 @@ private long getNumOfStringOccurences(File entityFile, String searchString)
try {
reader = new BufferedReader(new FileReader(entityFile));
while ((strLine = reader.readLine()) != null) {
- if (strLine.trim().contains(searchString))
+ if (strLine.trim().contains(searchString)) {
actualCount++;
+ }
}
} finally {
reader.close();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java
index c55f20216b..c651d32cfa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java
@@ -30,9 +30,7 @@
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.junit.Assert;
import org.junit.Before;
-import org.junit.Rule;
import org.junit.Test;
-import org.junit.rules.TestName;
import com.google.common.collect.ImmutableMap;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java
index 7f64dd52f9..7acaf11191 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java
@@ -463,15 +463,15 @@ public NMTokenCache getNMTokenCache() {
/**
* Register TimelineClient to AMRMClient.
- * @param timelineClient
+ * @param client the timeline client to register
*/
- public void registerTimelineClient(TimelineClient timelineClient) {
- this.timelineClient = timelineClient;
+ public void registerTimelineClient(TimelineClient client) {
+ this.timelineClient = client;
}
/**
* Get registered timeline client.
- * @return
+ * @return the registered timeline client
*/
public TimelineClient getRegisteredTimeineClient() {
return this.timelineClient;
@@ -481,7 +481,7 @@ public TimelineClient getRegisteredTimeineClient() {
* Wait for check
to return true for each 1000 ms.
* See also {@link #waitFor(com.google.common.base.Supplier, int)}
* and {@link #waitFor(com.google.common.base.Supplier, int, int)}
- * @param check
+ * @param check the condition for which it should wait
*/
public void waitFor(Supplier check) throws InterruptedException {
waitFor(check, 1000);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java
index 7cdda1be25..28d20c8d43 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java
@@ -304,7 +304,7 @@ public void registerTimelineClient(TimelineClient timelineClient) {
/**
* Get registered timeline client.
- * @return
+ * @return the registered timeline client
*/
public TimelineClient getRegisteredTimeineClient() {
return client.getRegisteredTimeineClient();
@@ -325,7 +325,7 @@ public abstract void updateBlacklist(List blacklistAdditions,
* Wait for check
to return true for each 1000 ms.
* See also {@link #waitFor(com.google.common.base.Supplier, int)}
* and {@link #waitFor(com.google.common.base.Supplier, int, int)}
- * @param check
+ * @param check the condition for which it should wait
*/
public void waitFor(Supplier check) throws InterruptedException {
waitFor(check, 1000);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
index e043c2f2c5..cc76718dd0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java
@@ -30,8 +30,6 @@
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
@@ -55,10 +53,12 @@ public abstract class TimelineClient extends AbstractService implements
* construct and initialize a timeline client if the following operations are
* supposed to be conducted by that user.
*/
- protected ApplicationId contextAppId;
+ private ApplicationId contextAppId;
/**
* Creates an instance of the timeline v.1.x client.
+ *
+ * @return the created timeline client instance
*/
@Public
public static TimelineClient createTimelineClient() {
@@ -68,6 +68,10 @@ public static TimelineClient createTimelineClient() {
/**
* Creates an instance of the timeline v.2 client.
+ *
+ * @param appId the application id with which the timeline client is
+ * associated
+ * @return the created timeline client instance
*/
@Public
public static TimelineClient createTimelineClient(ApplicationId appId) {
@@ -91,8 +95,8 @@ protected TimelineClient(String name, ApplicationId appId) {
* @param entities
* the collection of {@link TimelineEntity}
* @return the error information if the sent entities are not correctly stored
- * @throws IOException
- * @throws YarnException
+ * @throws IOException if there are I/O errors
+ * @throws YarnException if entities are incomplete/invalid
*/
@Public
public abstract TimelinePutResponse putEntities(
@@ -112,8 +116,8 @@ public abstract TimelinePutResponse putEntities(
* @param entities
* the collection of {@link TimelineEntity}
* @return the error information if the sent entities are not correctly stored
- * @throws IOException
- * @throws YarnException
+ * @throws IOException if there are I/O errors
+ * @throws YarnException if entities are incomplete/invalid
*/
@Public
public abstract TimelinePutResponse putEntities(
@@ -212,15 +216,15 @@ public abstract void cancelDelegationToken(
* for a non-v.2 timeline client instance, a YarnException is thrown.
*
*
- * @param entities
- * the collection of {@link org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity}
+ * @param entities the collection of {@link
+ * org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity}
* @throws IOException
* @throws YarnException
*/
@Public
public abstract void putEntities(
- org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities)
- throws IOException, YarnException;
+ org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity...
+ entities) throws IOException, YarnException;
/**
*
@@ -230,15 +234,15 @@ public abstract void putEntities(
* non-v.2 timeline client instance, a YarnException is thrown.
*
*
- * @param entities
- * the collection of {@link org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity}
+ * @param entities the collection of {@link
+ * org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity}
* @throws IOException
* @throws YarnException
*/
@Public
public abstract void putEntitiesAsync(
- org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities)
- throws IOException, YarnException;
+ org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity...
+ entities) throws IOException, YarnException;
/**
*
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
index 478efc4a31..4a5a443d02 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
@@ -391,8 +391,8 @@ public TimelinePutResponse putEntities(
@Override
public void putEntities(
- org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities)
- throws IOException, YarnException {
+ org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity...
+ entities) throws IOException, YarnException {
if (!timelineServiceV2) {
throw new YarnException("v.2 method is invoked on a v.1.x client");
}
@@ -401,8 +401,8 @@ public void putEntities(
@Override
public void putEntitiesAsync(
- org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities)
- throws IOException, YarnException {
+ org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity...
+ entities) throws IOException, YarnException {
if (!timelineServiceV2) {
throw new YarnException("v.2 method is invoked on a v.1.x client");
}
@@ -494,7 +494,8 @@ protected void putObjects(
throw new IOException(re);
}
if (resp == null ||
- resp.getClientResponseStatus() != ClientResponse.Status.OK) {
+ resp.getStatusInfo().getStatusCode() !=
+ ClientResponse.Status.OK.getStatusCode()) {
String msg = "Response from the timeline server is " +
((resp == null) ? "null":
"not successful," + " HTTP error code: " + resp.getStatus()
@@ -530,7 +531,8 @@ public Token run()
// TODO we should add retry logic here if timelineServiceAddress is
// not available immediately.
return (Token) authUrl.getDelegationToken(
- constructResURI(getConfig(), getTimelineServiceAddress(), false).toURL(),
+ constructResURI(getConfig(),
+ getTimelineServiceAddress(), false).toURL(),
token, renewer, doAsUser);
}
};
@@ -911,17 +913,21 @@ public boolean shouldRetryOn(Exception e) {
}
private final class EntitiesHolder extends FutureTask {
- private final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities entities;
+ private final
+ org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities
+ entities;
private final boolean isSync;
EntitiesHolder(
- final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities entities,
+ final
+ org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities
+ entities,
final boolean isSync) {
super(new Callable() {
// publishEntities()
public Void call() throws Exception {
MultivaluedMap params = new MultivaluedMapImpl();
- params.add("appid", contextAppId.toString());
+ params.add("appid", getContextAppId().toString());
params.add("async", Boolean.toString(!isSync));
putObjects("entities", params, entities);
return null;
@@ -935,7 +941,8 @@ public boolean isSync() {
return isSync;
}
- public org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities getEntities() {
+ public org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities
+ getEntities() {
return entities;
}
}
@@ -947,7 +954,7 @@ public org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities getEn
private class TimelineEntityDispatcher {
/**
* Time period for which the timelineclient will wait for draining after
- * stop
+ * stop.
*/
private static final long DRAIN_TIME_PERIOD = 2000L;
@@ -1063,17 +1070,20 @@ private void publishWithoutBlockingOnQueue(
}
public void dispatchEntities(boolean sync,
- org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity[] entitiesTobePublished)
- throws YarnException {
+ org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity[]
+ entitiesTobePublished) throws YarnException {
if (executor.isShutdown()) {
throw new YarnException("Timeline client is in the process of stopping,"
+ " not accepting any more TimelineEntities");
}
// wrap all TimelineEntity into TimelineEntities object
- org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities entities =
- new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities();
- for (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity : entitiesTobePublished) {
+ org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities
+ entities =
+ new org.apache.hadoop.yarn.api.records.timelineservice.
+ TimelineEntities();
+ for (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+ entity : entitiesTobePublished) {
entities.addEntity(entity);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java
index 592bfa3ad6..221969bdd6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java
@@ -100,7 +100,8 @@ public void testTimelineEntities() throws Exception {
}
entity.addMetric(metric2);
- TimelineMetric metric3 = new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
+ TimelineMetric metric3 =
+ new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE);
metric3.setId("test metric id 1");
metric3.addValue(4L, (short) 4);
Assert.assertEquals("metric3 should equal to metric2! ", metric3, metric2);
@@ -212,18 +213,22 @@ public void testFirstClassCitizenEntities() throws Exception {
ApplicationAttemptId.newInstance(
ApplicationId.newInstance(0, 1), 1), 1).toString());
- cluster.addChild(TimelineEntityType.YARN_FLOW_RUN.toString(), flow1.getId());
+ cluster.addChild(TimelineEntityType.YARN_FLOW_RUN.toString(),
+ flow1.getId());
flow1
.setParent(TimelineEntityType.YARN_CLUSTER.toString(), cluster.getId());
flow1.addChild(TimelineEntityType.YARN_FLOW_RUN.toString(), flow2.getId());
flow2.setParent(TimelineEntityType.YARN_FLOW_RUN.toString(), flow1.getId());
- flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(), app1.getId());
- flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(), app2.getId());
+ flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(),
+ app1.getId());
+ flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(),
+ app2.getId());
app1.setParent(TimelineEntityType.YARN_FLOW_RUN.toString(), flow2.getId());
app1.addChild(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
appAttempt.getId());
appAttempt
- .setParent(TimelineEntityType.YARN_APPLICATION.toString(), app1.getId());
+ .setParent(TimelineEntityType.YARN_APPLICATION.toString(),
+ app1.getId());
app2.setParent(TimelineEntityType.YARN_FLOW_RUN.toString(), flow2.getId());
appAttempt.addChild(TimelineEntityType.YARN_CONTAINER.toString(),
container.getId());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java
index 71dafdc846..5813340da2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java
@@ -43,7 +43,7 @@ public class TestTimelineClientV2Impl {
private static final Log LOG =
LogFactory.getLog(TestTimelineClientV2Impl.class);
private TestV2TimelineClient client;
- private static long TIME_TO_SLEEP = 150;
+ private static final long TIME_TO_SLEEP = 150L;
private static final String EXCEPTION_MSG = "Exception in the content";
@Before
@@ -62,12 +62,12 @@ public void setup() {
public TestName currTestName = new TestName();
private YarnConfiguration conf;
- private TestV2TimelineClient createTimelineClient(YarnConfiguration conf) {
+ private TestV2TimelineClient createTimelineClient(YarnConfiguration config) {
ApplicationId id = ApplicationId.newInstance(0, 0);
- TestV2TimelineClient client = new TestV2TimelineClient(id);
- client.init(conf);
- client.start();
- return client;
+ TestV2TimelineClient tc = new TestV2TimelineClient(id);
+ tc.init(config);
+ tc.start();
+ return tc;
}
private class TestV2TimelineClientForExceptionHandling
@@ -76,12 +76,16 @@ public TestV2TimelineClientForExceptionHandling(ApplicationId id) {
super(id);
}
- protected boolean throwYarnException;
+ private boolean throwYarnException;
public void setThrowYarnException(boolean throwYarnException) {
this.throwYarnException = throwYarnException;
}
+ public boolean isThrowYarnException() {
+ return throwYarnException;
+ }
+
@Override
protected void putObjects(URI base, String path,
MultivaluedMap params, Object obj)
@@ -123,7 +127,7 @@ public TestV2TimelineClient(ApplicationId id) {
protected void putObjects(String path,
MultivaluedMap params, Object obj)
throws IOException, YarnException {
- if (throwYarnException) {
+ if (isThrowYarnException()) {
throw new YarnException("ActualException");
}
publishedEntities.add((TimelineEntities) obj);
@@ -139,17 +143,17 @@ protected void putObjects(String path,
@Test
public void testExceptionMultipleRetry() {
- TestV2TimelineClientForExceptionHandling client =
+ TestV2TimelineClientForExceptionHandling c =
new TestV2TimelineClientForExceptionHandling(
ApplicationId.newInstance(0, 0));
int maxRetries = 2;
conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES,
maxRetries);
- client.init(conf);
- client.start();
- client.setTimelineServiceAddress("localhost:12345");
+ c.init(conf);
+ c.start();
+ c.setTimelineServiceAddress("localhost:12345");
try {
- client.putEntities(new TimelineEntity());
+ c.putEntities(new TimelineEntity());
} catch (IOException e) {
Assert.fail("YARN exception is expected");
} catch (YarnException e) {
@@ -161,9 +165,9 @@ public void testExceptionMultipleRetry() {
"TimelineClient has reached to max retry times : " + maxRetries));
}
- client.setThrowYarnException(true);
+ c.setThrowYarnException(true);
try {
- client.putEntities(new TimelineEntity());
+ c.putEntities(new TimelineEntity());
} catch (IOException e) {
Assert.fail("YARN exception is expected");
} catch (YarnException e) {
@@ -173,7 +177,7 @@ public void testExceptionMultipleRetry() {
Assert.assertTrue("YARN exception is expected",
cause.getMessage().contains(EXCEPTION_MSG));
}
- client.stop();
+ c.stop();
}
@Test
@@ -348,7 +352,6 @@ private void printReceivedEntities() {
for (int i = 0; i < client.getNumOfTimelineEntitiesPublished(); i++) {
TimelineEntities publishedEntities = client.getPublishedEntities(i);
StringBuilder entitiesPerPublish = new StringBuilder();
- ;
for (TimelineEntity entity : publishedEntities.getEntities()) {
entitiesPerPublish.append(entity.getId());
entitiesPerPublish.append(",");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java
index 34b9497f80..d3d815b218 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java
@@ -37,18 +37,21 @@ public void testMapCastToHashMap() {
// Test empty hashmap be casted to a empty hashmap
Map emptyHashMap = new HashMap();
- Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(emptyHashMap).size(), 0);
+ Assert.assertEquals(
+ TimelineServiceHelper.mapCastToHashMap(emptyHashMap).size(), 0);
// Test empty non-hashmap be casted to a empty hashmap
Map emptyTreeMap = new TreeMap();
- Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(emptyTreeMap).size(), 0);
+ Assert.assertEquals(
+ TimelineServiceHelper.mapCastToHashMap(emptyTreeMap).size(), 0);
// Test non-empty hashmap be casted to hashmap correctly
Map firstHashMap = new HashMap();
String key = "KEY";
String value = "VALUE";
firstHashMap.put(key, value);
- Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(firstHashMap), firstHashMap);
+ Assert.assertEquals(
+ TimelineServiceHelper.mapCastToHashMap(firstHashMap), firstHashMap);
// Test non-empty non-hashmap is casted correctly.
Map firstTreeMap = new TreeMap();
@@ -59,17 +62,21 @@ public void testMapCastToHashMap() {
Assert.assertEquals(alternateHashMap.get(key), value);
// Test complicated hashmap be casted correctly
- Map> complicatedHashMap = new HashMap>();
+ Map> complicatedHashMap =
+ new HashMap>();
Set hashSet = new HashSet();
hashSet.add(value);
complicatedHashMap.put(key, hashSet);
- Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(complicatedHashMap),
+ Assert.assertEquals(
+ TimelineServiceHelper.mapCastToHashMap(complicatedHashMap),
complicatedHashMap);
// Test complicated non-hashmap get casted correctly
- Map> complicatedTreeMap = new TreeMap>();
+ Map> complicatedTreeMap =
+ new TreeMap>();
complicatedTreeMap.put(key, hashSet);
- Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(complicatedTreeMap).get(key),
+ Assert.assertEquals(
+ TimelineServiceHelper.mapCastToHashMap(complicatedTreeMap).get(key),
hashSet);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java
index d23c04a51c..64eea63160 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java
@@ -48,9 +48,9 @@ public interface CollectorNodemanagerProtocol {
*
* @param request the request of registering a new collector or a list of
* collectors
- * @return
- * @throws YarnException
- * @throws IOException
+ * @return the response for registering the new collector
+ * @throws YarnException if the request is invalid
+ * @throws IOException if there are I/O errors
*/
ReportNewCollectorInfoResponse reportNewCollectorInfo(
ReportNewCollectorInfoRequest request)
@@ -63,9 +63,9 @@ ReportNewCollectorInfoResponse reportNewCollectorInfo(
*
* @param request the request of getting the aggregator context information of
* the given application
- * @return
- * @throws YarnException
- * @throws IOException
+ * @return the response for registering the new collector
+ * @throws YarnException if the request is invalid
+ * @throws IOException if there are I/O errors
*/
GetTimelineCollectorContextResponse getTimelineCollectorContext(
GetTimelineCollectorContextRequest request)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java
index 655e9890a2..24f7c3dd53 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java
@@ -25,7 +25,8 @@
@Private
@Unstable
@ProtocolInfo(
- protocolName = "org.apache.hadoop.yarn.server.api.CollectorNodemanagerProtocolPB",
+ protocolName =
+ "org.apache.hadoop.yarn.server.api.CollectorNodemanagerProtocolPB",
protocolVersion = 1)
public interface CollectorNodemanagerProtocolPB extends
CollectorNodemanagerProtocolService.BlockingInterface {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java
index b9e17f22ea..bc50ac534c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java
@@ -53,7 +53,7 @@ public class CollectorNodemanagerProtocolPBClientImpl implements
+ "rpc.nm-command-timeout";
/**
- * Maximum of 1 minute timeout for a Node to react to the command
+ * Maximum of 1 minute timeout for a Node to react to the command.
*/
static final int DEFAULT_COMMAND_TIMEOUT = 60000;
@@ -63,7 +63,7 @@ public class CollectorNodemanagerProtocolPBClientImpl implements
public CollectorNodemanagerProtocolPBClientImpl(long clientVersion,
InetSocketAddress addr, Configuration conf) throws IOException {
RPC.setProtocolEngine(conf, CollectorNodemanagerProtocolPB.class,
- ProtobufRpcEngine.class);
+ ProtobufRpcEngine.class);
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
int expireIntvl = conf.getInt(NM_COMMAND_TIMEOUT, DEFAULT_COMMAND_TIMEOUT);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java
index 21fb270285..7b93a68a29 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java
@@ -41,7 +41,8 @@ public class CollectorNodemanagerProtocolPBServiceImpl implements
private CollectorNodemanagerProtocol real;
- public CollectorNodemanagerProtocolPBServiceImpl(CollectorNodemanagerProtocol impl) {
+ public CollectorNodemanagerProtocolPBServiceImpl(
+ CollectorNodemanagerProtocol impl) {
this.real = impl;
}
@@ -52,7 +53,8 @@ public ReportNewCollectorInfoResponseProto reportNewCollectorInfo(
ReportNewCollectorInfoRequestPBImpl request =
new ReportNewCollectorInfoRequestPBImpl(proto);
try {
- ReportNewCollectorInfoResponse response = real.reportNewCollectorInfo(request);
+ ReportNewCollectorInfoResponse response =
+ real.reportNewCollectorInfo(request);
return ((ReportNewCollectorInfoResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java
index b53b55b14f..7014388c80 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java
@@ -29,10 +29,10 @@
public class GetTimelineCollectorContextRequestPBImpl extends
GetTimelineCollectorContextRequest {
- GetTimelineCollectorContextRequestProto
+ private GetTimelineCollectorContextRequestProto
proto = GetTimelineCollectorContextRequestProto.getDefaultInstance();
- GetTimelineCollectorContextRequestProto.Builder builder = null;
- boolean viaProto = false;
+ private GetTimelineCollectorContextRequestProto.Builder builder = null;
+ private boolean viaProto = false;
private ApplicationId appId = null;
@@ -60,8 +60,9 @@ public int hashCode() {
@Override
public boolean equals(Object other) {
- if (other == null)
+ if (other == null) {
return false;
+ }
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
@@ -80,8 +81,9 @@ private void mergeLocalToBuilder() {
}
private void mergeLocalToProto() {
- if (viaProto)
+ if (viaProto) {
maybeInitBuilder();
+ }
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
@@ -100,7 +102,8 @@ public ApplicationId getApplicationId() {
return this.appId;
}
- GetTimelineCollectorContextRequestProtoOrBuilder p = viaProto ? proto : builder;
+ GetTimelineCollectorContextRequestProtoOrBuilder p =
+ viaProto ? proto : builder;
if (!p.hasAppId()) {
return null;
}
@@ -110,14 +113,16 @@ public ApplicationId getApplicationId() {
}
@Override
- public void setApplicationId(ApplicationId appId) {
+ public void setApplicationId(ApplicationId id) {
maybeInitBuilder();
- if (appId == null)
+ if (id == null) {
builder.clearAppId();
- this.appId = appId;
+ }
+ this.appId = id;
}
- private ApplicationIdPBImpl convertFromProtoFormat(YarnProtos.ApplicationIdProto p) {
+ private ApplicationIdPBImpl convertFromProtoFormat(
+ YarnProtos.ApplicationIdProto p) {
return new ApplicationIdPBImpl(p);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java
index 34713cb773..151b0363ec 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java
@@ -26,10 +26,10 @@
public class GetTimelineCollectorContextResponsePBImpl extends
GetTimelineCollectorContextResponse {
- GetTimelineCollectorContextResponseProto proto =
+ private GetTimelineCollectorContextResponseProto proto =
GetTimelineCollectorContextResponseProto.getDefaultInstance();
- GetTimelineCollectorContextResponseProto.Builder builder = null;
- boolean viaProto = false;
+ private GetTimelineCollectorContextResponseProto.Builder builder = null;
+ private boolean viaProto = false;
public GetTimelineCollectorContextResponsePBImpl() {
builder = GetTimelineCollectorContextResponseProto.newBuilder();
@@ -55,8 +55,9 @@ public int hashCode() {
@Override
public boolean equals(Object other) {
- if (other == null)
+ if (other == null) {
return false;
+ }
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
@@ -69,8 +70,9 @@ public String toString() {
}
private void mergeLocalToProto() {
- if (viaProto)
+ if (viaProto) {
maybeInitBuilder();
+ }
proto = builder.build();
viaProto = true;
}
@@ -84,7 +86,8 @@ private void maybeInitBuilder() {
@Override
public String getUserId() {
- GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder;
+ GetTimelineCollectorContextResponseProtoOrBuilder p =
+ viaProto ? proto : builder;
if (!p.hasUserId()) {
return null;
}
@@ -103,7 +106,8 @@ public void setUserId(String userId) {
@Override
public String getFlowName() {
- GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder;
+ GetTimelineCollectorContextResponseProtoOrBuilder p =
+ viaProto ? proto : builder;
if (!p.hasFlowName()) {
return null;
}
@@ -122,7 +126,8 @@ public void setFlowName(String flowName) {
@Override
public String getFlowVersion() {
- GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder;
+ GetTimelineCollectorContextResponseProtoOrBuilder p =
+ viaProto ? proto : builder;
if (!p.hasFlowVersion()) {
return null;
}
@@ -141,7 +146,8 @@ public void setFlowVersion(String flowVersion) {
@Override
public long getFlowRunId() {
- GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder;
+ GetTimelineCollectorContextResponseProtoOrBuilder p =
+ viaProto ? proto : builder;
return p.getFlowRunId();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
index 912a7a3c45..d0c11985db 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
@@ -58,7 +58,7 @@ public class NodeHeartbeatRequestPBImpl extends NodeHeartbeatRequest {
private Set labels = null;
private List logAggregationReportsForApps = null;
- Map registeredCollectors = null;
+ private Map registeredCollectors = null;
public NodeHeartbeatRequestPBImpl() {
builder = NodeHeartbeatRequestProto.newBuilder();
@@ -161,10 +161,11 @@ private LogAggregationReportProto convertToProtoFormat(
private void addRegisteredCollectorsToProto() {
maybeInitBuilder();
builder.clearRegisteredCollectors();
- for (Map.Entry entry : registeredCollectors.entrySet()) {
+ for (Map.Entry entry :
+ registeredCollectors.entrySet()) {
builder.addRegisteredCollectors(AppCollectorsMapProto.newBuilder()
- .setAppId(convertToProtoFormat(entry.getKey()))
- .setAppCollectorAddr(entry.getValue()));
+ .setAppId(convertToProtoFormat(entry.getKey()))
+ .setAppCollectorAddr(entry.getValue()));
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java
index 788ef588d6..cd85241c51 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java
@@ -69,7 +69,7 @@ public class NodeHeartbeatResponsePBImpl extends
private List applicationsToCleanup = null;
private Map systemCredentials = null;
private Resource resource = null;
- Map appCollectorsMap = null;
+ private Map appCollectorsMap = null;
private MasterKey containerTokenMasterKey = null;
private MasterKey nmTokenMasterKey = null;
@@ -148,8 +148,8 @@ private void addAppCollectorsMapToProto() {
builder.clearAppCollectorsMap();
for (Map.Entry entry : appCollectorsMap.entrySet()) {
builder.addAppCollectorsMap(AppCollectorsMapProto.newBuilder()
- .setAppId(convertToProtoFormat(entry.getKey()))
- .setAppCollectorAddr(entry.getValue()));
+ .setAppId(convertToProtoFormat(entry.getKey()))
+ .setAppCollectorAddr(entry.getValue()));
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoRequestPBImpl.java
index 5dd8f17fed..c6f66194e5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoRequestPBImpl.java
@@ -30,11 +30,11 @@
public class ReportNewCollectorInfoRequestPBImpl extends
ReportNewCollectorInfoRequest {
- ReportNewCollectorInfoRequestProto proto =
+ private ReportNewCollectorInfoRequestProto proto =
ReportNewCollectorInfoRequestProto.getDefaultInstance();
- ReportNewCollectorInfoRequestProto.Builder builder = null;
- boolean viaProto = false;
+ private ReportNewCollectorInfoRequestProto.Builder builder = null;
+ private boolean viaProto = false;
private List collectorsList = null;
@@ -62,8 +62,9 @@ public int hashCode() {
@Override
public boolean equals(Object other) {
- if (other == null)
+ if (other == null) {
return false;
+ }
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
@@ -71,8 +72,9 @@ public boolean equals(Object other) {
}
private void mergeLocalToProto() {
- if (viaProto)
+ if (viaProto) {
maybeInitBuilder();
+ }
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
@@ -104,10 +106,10 @@ private void addLocalCollectorsToProto() {
private void initLocalCollectorsList() {
ReportNewCollectorInfoRequestProtoOrBuilder p = viaProto ? proto : builder;
- List collectorsList =
+ List list =
p.getAppCollectorsList();
this.collectorsList = new ArrayList();
- for (AppCollectorsMapProto m : collectorsList) {
+ for (AppCollectorsMapProto m : list) {
this.collectorsList.add(convertFromProtoFormat(m));
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoResponsePBImpl.java
index 7c90675b6a..5f2a10aace 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoResponsePBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoResponsePBImpl.java
@@ -29,18 +29,19 @@
public class ReportNewCollectorInfoResponsePBImpl extends
ReportNewCollectorInfoResponse {
- ReportNewCollectorInfoResponseProto proto =
+ private ReportNewCollectorInfoResponseProto proto =
ReportNewCollectorInfoResponseProto.getDefaultInstance();
- ReportNewCollectorInfoResponseProto.Builder builder = null;
+ private ReportNewCollectorInfoResponseProto.Builder builder = null;
- boolean viaProto = false;
+ private boolean viaProto = false;
public ReportNewCollectorInfoResponsePBImpl() {
builder = ReportNewCollectorInfoResponseProto.newBuilder();
}
- public ReportNewCollectorInfoResponsePBImpl(ReportNewCollectorInfoResponseProto proto) {
+ public ReportNewCollectorInfoResponsePBImpl(
+ ReportNewCollectorInfoResponseProto proto) {
this.proto = proto;
viaProto = true;
}
@@ -58,8 +59,9 @@ public int hashCode() {
@Override
public boolean equals(Object other) {
- if (other == null)
+ if (other == null) {
return false;
+ }
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/AppCollectorsMapPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/AppCollectorsMapPBImpl.java
index eb3bde4df6..3740035d83 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/AppCollectorsMapPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/AppCollectorsMapPBImpl.java
@@ -33,11 +33,11 @@
@Unstable
public class AppCollectorsMapPBImpl extends AppCollectorsMap {
- AppCollectorsMapProto proto =
+ private AppCollectorsMapProto proto =
AppCollectorsMapProto.getDefaultInstance();
- AppCollectorsMapProto.Builder builder = null;
- boolean viaProto = false;
+ private AppCollectorsMapProto.Builder builder = null;
+ private boolean viaProto = false;
private ApplicationId appId = null;
private String collectorAddr = null;
@@ -52,7 +52,7 @@ public AppCollectorsMapPBImpl(AppCollectorsMapProto proto) {
}
public AppCollectorsMapProto getProto() {
- mergeLocalToProto();
+ mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
@@ -65,8 +65,9 @@ public int hashCode() {
@Override
public boolean equals(Object other) {
- if (other == null)
+ if (other == null) {
return false;
+ }
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
@@ -98,12 +99,12 @@ public String getCollectorAddr() {
}
@Override
- public void setApplicationId(ApplicationId appId) {
+ public void setApplicationId(ApplicationId id) {
maybeInitBuilder();
- if (appId == null) {
+ if (id == null) {
builder.clearAppId();
}
- this.appId = appId;
+ this.appId = id;
}
@Override
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
index da8e22473f..e25f5281fb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
@@ -77,7 +77,8 @@ public class TestRPC {
private static final String EXCEPTION_MSG = "test error";
private static final String EXCEPTION_CAUSE = "exception cause";
- private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
+ private static final RecordFactory RECORD_FACTORY =
+ RecordFactoryProvider.getRecordFactory(null);
public static final String ILLEGAL_NUMBER_MESSAGE =
"collectors' number in ReportNewCollectorInfoRequest is not ONE.";
@@ -101,7 +102,8 @@ public void testUnknownCall() {
// Any unrelated protocol would do
ApplicationClientProtocol proxy = (ApplicationClientProtocol) rpc.getProxy(
- ApplicationClientProtocol.class, NetUtils.getConnectAddress(server), conf);
+ ApplicationClientProtocol.class, NetUtils.getConnectAddress(server),
+ conf);
try {
proxy.getNewApplication(Records
@@ -111,7 +113,8 @@ public void testUnknownCall() {
Assert.assertTrue(e.getMessage().matches(
"Unknown method getNewApplication called on.*"
+ "org.apache.hadoop.yarn.proto.ApplicationClientProtocol"
- + "\\$ApplicationClientProtocolService\\$BlockingInterface protocol."));
+ + "\\$ApplicationClientProtocolService\\$BlockingInterface "
+ + "protocol."));
} catch (Exception e) {
e.printStackTrace();
} finally {
@@ -132,8 +135,10 @@ public void testRPCOnCollectorNodeManagerProtocol() throws IOException {
server.start();
// Test unrelated protocol wouldn't get response
- ApplicationClientProtocol unknownProxy = (ApplicationClientProtocol) rpc.getProxy(
- ApplicationClientProtocol.class, NetUtils.getConnectAddress(server), conf);
+ ApplicationClientProtocol unknownProxy =
+ (ApplicationClientProtocol) rpc.getProxy(
+ ApplicationClientProtocol.class, NetUtils.getConnectAddress(server),
+ conf);
try {
unknownProxy.getNewApplication(Records
@@ -143,14 +148,17 @@ public void testRPCOnCollectorNodeManagerProtocol() throws IOException {
Assert.assertTrue(e.getMessage().matches(
"Unknown method getNewApplication called on.*"
+ "org.apache.hadoop.yarn.proto.ApplicationClientProtocol"
- + "\\$ApplicationClientProtocolService\\$BlockingInterface protocol."));
+ + "\\$ApplicationClientProtocolService\\$BlockingInterface "
+ + "protocol."));
} catch (Exception e) {
e.printStackTrace();
}
// Test CollectorNodemanagerProtocol get proper response
- CollectorNodemanagerProtocol proxy = (CollectorNodemanagerProtocol)rpc.getProxy(
- CollectorNodemanagerProtocol.class, NetUtils.getConnectAddress(server), conf);
+ CollectorNodemanagerProtocol proxy =
+ (CollectorNodemanagerProtocol)rpc.getProxy(
+ CollectorNodemanagerProtocol.class, NetUtils.getConnectAddress(server),
+ conf);
// Verify request with DEFAULT_APP_ID and DEFAULT_COLLECTOR_ADDR get
// normally response.
try {
@@ -196,7 +204,8 @@ public void testRPCOnCollectorNodeManagerProtocol() throws IOException {
Assert.fail("RPC call failured is expected here.");
} catch (YarnException | IOException e) {
Assert.assertTrue(e instanceof YarnException);
- Assert.assertTrue(e.getMessage().contains("The application is not found."));
+ Assert.assertTrue(e.getMessage().contains(
+ "The application is not found."));
}
server.stop();
}
@@ -215,12 +224,13 @@ private void test(String rpcClass) throws Exception {
Server server = rpc.getServer(ContainerManagementProtocol.class,
new DummyContainerManager(), addr, conf, null, 1);
server.start();
- RPC.setProtocolEngine(conf, ContainerManagementProtocolPB.class, ProtobufRpcEngine.class);
+ RPC.setProtocolEngine(conf, ContainerManagementProtocolPB.class,
+ ProtobufRpcEngine.class);
ContainerManagementProtocol proxy = (ContainerManagementProtocol)
rpc.getProxy(ContainerManagementProtocol.class,
NetUtils.getConnectAddress(server), conf);
ContainerLaunchContext containerLaunchContext =
- recordFactory.newRecordInstance(ContainerLaunchContext.class);
+ RECORD_FACTORY.newRecordInstance(ContainerLaunchContext.class);
ApplicationId applicationId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId applicationAttemptId =
@@ -257,10 +267,10 @@ private void test(String rpcClass) throws Exception {
boolean exception = false;
try {
StopContainersRequest stopRequest =
- recordFactory.newRecordInstance(StopContainersRequest.class);
+ RECORD_FACTORY.newRecordInstance(StopContainersRequest.class);
stopRequest.setContainerIds(containerIds);
proxy.stopContainers(stopRequest);
- } catch (YarnException e) {
+ } catch (YarnException e) {
exception = true;
Assert.assertTrue(e.getMessage().contains(EXCEPTION_MSG));
Assert.assertTrue(e.getMessage().contains(EXCEPTION_CAUSE));
@@ -284,7 +294,7 @@ public GetContainerStatusesResponse getContainerStatuses(
GetContainerStatusesRequest request)
throws YarnException {
GetContainerStatusesResponse response =
- recordFactory.newRecordInstance(GetContainerStatusesResponse.class);
+ RECORD_FACTORY.newRecordInstance(GetContainerStatusesResponse.class);
response.setContainerStatuses(statuses);
return response;
}
@@ -293,8 +303,9 @@ public GetContainerStatusesResponse getContainerStatuses(
public StartContainersResponse startContainers(
StartContainersRequest requests) throws YarnException {
StartContainersResponse response =
- recordFactory.newRecordInstance(StartContainersResponse.class);
- for (StartContainerRequest request : requests.getStartContainerRequests()) {
+ RECORD_FACTORY.newRecordInstance(StartContainersResponse.class);
+ for (StartContainerRequest request :
+ requests.getStartContainerRequests()) {
Token containerToken = request.getContainerToken();
ContainerTokenIdentifier tokenId = null;
@@ -304,7 +315,7 @@ public StartContainersResponse startContainers(
throw RPCUtil.getRemoteException(e);
}
ContainerStatus status =
- recordFactory.newRecordInstance(ContainerStatus.class);
+ RECORD_FACTORY.newRecordInstance(ContainerStatus.class);
status.setState(ContainerState.RUNNING);
status.setContainerId(tokenId.getContainerID());
status.setExitStatus(0);
@@ -324,7 +335,8 @@ public StopContainersResponse stopContainers(StopContainersRequest request)
@Override
public IncreaseContainersResourceResponse increaseContainersResource(
- IncreaseContainersResourceRequest request) throws YarnException, IOException {
+ IncreaseContainersResourceRequest request)
+ throws YarnException, IOException {
return null;
}
@@ -383,7 +395,8 @@ public ReportNewCollectorInfoResponse reportNewCollectorInfo(
}
ReportNewCollectorInfoResponse response =
- recordFactory.newRecordInstance(ReportNewCollectorInfoResponse.class);
+ RECORD_FACTORY.newRecordInstance(
+ ReportNewCollectorInfoResponse.class);
return response;
}
@@ -392,8 +405,8 @@ public GetTimelineCollectorContextResponse getTimelineCollectorContext(
GetTimelineCollectorContextRequest request)
throws YarnException, IOException {
if (request.getApplicationId().getId() == 1) {
- return GetTimelineCollectorContextResponse.newInstance(
- "test_user_id", "test_flow_name", "test_flow_version", 12345678L);
+ return GetTimelineCollectorContextResponse.newInstance(
+ "test_user_id", "test_flow_name", "test_flow_version", 12345678L);
} else {
throw new YarnException("The application is not found.");
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
index 0c84be0e59..5bfbb8df1a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
@@ -187,8 +187,8 @@ protected ContainerManagerImpl createContainerManager(Context context,
}
}
- protected NMCollectorService createNMCollectorService(Context context) {
- return new NMCollectorService(context);
+ protected NMCollectorService createNMCollectorService(Context ctxt) {
+ return new NMCollectorService(ctxt);
}
protected WebServer createWebServer(Context nmContext,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
index b37b38e4f7..f692bf1723 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
@@ -816,7 +816,8 @@ public void run() {
NodeStatusUpdaterImpl.this.context
.getNMTokenSecretManager().getCurrentKey(),
nodeLabelsForHeartbeat,
- NodeStatusUpdaterImpl.this.context.getRegisteredCollectors());
+ NodeStatusUpdaterImpl.this.context
+ .getRegisteredCollectors());
if (logAggregationEnabled) {
// pull log aggregation status for application running in this NM
@@ -939,23 +940,6 @@ public void run() {
}
}
- /**
- * Caller should take care of sending non null nodelabels for both
- * arguments
- *
- * @param nodeLabelsNew
- * @param nodeLabelsOld
- * @return if the New node labels are diff from the older one.
- */
- private boolean areNodeLabelsUpdated(Set nodeLabelsNew,
- Set nodeLabelsOld) {
- if (nodeLabelsNew.size() != nodeLabelsOld.size()
- || !nodeLabelsOld.containsAll(nodeLabelsNew)) {
- return true;
- }
- return false;
- }
-
private void updateTimelineClientsAddress(
NodeHeartbeatResponse response) {
Map knownCollectorsMap =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
index 89e81ca6ed..cb63ae311d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
@@ -230,8 +230,7 @@ public ContainerManagerImpl(Context context, ContainerExecutor exec,
nmMetricsPublisher = createNMTimelinePublisher(context);
context.setNMTimelinePublisher(nmMetricsPublisher);
}
- this.containersMonitor =
- new ContainersMonitorImpl(exec, dispatcher, this.context);
+ this.containersMonitor = createContainersMonitor(exec);
addService(this.containersMonitor);
dispatcher.register(ContainerEventType.class,
@@ -447,8 +446,9 @@ protected SharedCacheUploadService createSharedCacheUploaderService() {
}
@VisibleForTesting
- protected NMTimelinePublisher createNMTimelinePublisher(Context context) {
- NMTimelinePublisher nmTimelinePublisherLocal = new NMTimelinePublisher(context);
+ protected NMTimelinePublisher createNMTimelinePublisher(Context ctxt) {
+ NMTimelinePublisher nmTimelinePublisherLocal =
+ new NMTimelinePublisher(ctxt);
addIfService(nmTimelinePublisherLocal);
return nmTimelinePublisherLocal;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationContainerFinishedEvent.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationContainerFinishedEvent.java
index 9cd34ccf58..0a8ffdff65 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationContainerFinishedEvent.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationContainerFinishedEvent.java
@@ -25,7 +25,8 @@ public class ApplicationContainerFinishedEvent extends ApplicationEvent {
private ContainerStatus containerStatus;
public ApplicationContainerFinishedEvent(ContainerStatus containerStatus) {
- super(containerStatus.getContainerId().getApplicationAttemptId().getApplicationId(),
+ super(containerStatus.getContainerId().getApplicationAttemptId().
+ getApplicationId(),
ApplicationEventType.APPLICATION_CONTAINER_FINISHED);
this.containerStatus = containerStatus;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
index 00bd56bd0f..193dfead37 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java
@@ -74,7 +74,6 @@
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStartMonitoringEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStopMonitoringEvent;
-import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService.RecoveredContainerState;
@@ -87,7 +86,6 @@
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.Clock;
-import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.SystemClock;
import org.apache.hadoop.yarn.util.resource.Resources;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
index 4167ece602..e6a66bd9fe 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
@@ -435,8 +435,9 @@ public void run() {
+ " for the first time");
ResourceCalculatorProcessTree pt =
- ResourceCalculatorProcessTree.getResourceCalculatorProcessTree(
- pId, processTreeClass, conf);
+ ResourceCalculatorProcessTree.
+ getResourceCalculatorProcessTree(
+ pId, processTreeClass, conf);
ptInfo.setPid(pId);
ptInfo.setProcessTree(pt);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
index 39a61813ac..c4d45a90da 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
@@ -33,12 +33,12 @@
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetricOperation;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.Identifier;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
+import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetricOperation;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.Dispatcher;
@@ -56,6 +56,8 @@
import org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree;
import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
+import com.google.common.annotations.VisibleForTesting;
+
/**
* Metrics publisher service that publishes data to the timeline service v.2. It
* is used only if the timeline service v.2 is enabled and the system publishing
@@ -73,7 +75,7 @@ public class NMTimelinePublisher extends CompositeService {
private String httpAddress;
- protected final Map appToClientMap;
+ private final Map appToClientMap;
public NMTimelinePublisher(Context context) {
super(NMTimelinePublisher.class.getName());
@@ -99,6 +101,11 @@ protected void serviceStart() throws Exception {
this.httpAddress = nodeId.getHost() + ":" + context.getHttpPort();
}
+ @VisibleForTesting
+ Map getAppToClientMap() {
+ return appToClientMap;
+ }
+
protected void handleNMTimelineEvent(NMTimelineEvent event) {
switch (event.getType()) {
case TIMELINE_ENTITY_PUBLISH:
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
index 8cec5efd09..e7d18b7ecd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java
@@ -85,7 +85,6 @@
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
-import org.apache.hadoop.yarn.server.api.ContainerContext;
import org.apache.hadoop.yarn.server.api.ResourceTracker;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
@@ -1707,9 +1706,9 @@ protected NMContext createNMContext(
NMContainerTokenSecretManager containerTokenSecretManager,
NMTokenSecretManagerInNM nmTokenSecretManager,
NMStateStoreService store, boolean isDistributedSchedulingEnabled,
- Configuration conf) {
+ Configuration config) {
return new MyNMContext(containerTokenSecretManager,
- nmTokenSecretManager, conf);
+ nmTokenSecretManager, config);
}
};
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java
index 92744b2480..05ea03641e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java
@@ -645,8 +645,9 @@ private Container createMockedContainer(ApplicationId appId, int containerId) {
when(c.getLaunchContext()).thenReturn(launchContext);
when(launchContext.getApplicationACLs()).thenReturn(
new HashMap());
- when(c.cloneAndGetContainerStatus()).thenReturn(BuilderUtils.newContainerStatus(cId,
- ContainerState.NEW, "", 0, Resource.newInstance(1024, 1)));
+ when(c.cloneAndGetContainerStatus()).thenReturn(
+ BuilderUtils.newContainerStatus(cId,
+ ContainerState.NEW, "", 0, Resource.newInstance(1024, 1)));
return c;
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
index 7a4fca3661..cc9e662f9e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
@@ -99,7 +99,6 @@
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.AuxiliaryServiceHelper;
-import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
import org.hamcrest.CoreMatchers;
@@ -111,16 +110,17 @@
public class TestContainerLaunch extends BaseContainerManagerTest {
private static final String INVALID_JAVA_HOME = "/no/jvm/here";
- protected Context distContext = new NMContext(new NMContainerTokenSecretManager(
- conf), new NMTokenSecretManagerInNM(), null,
- new ApplicationACLsManager(conf), new NMNullStateStoreService(), false,
- conf) {
- public int getHttpPort() {
- return HTTP_PORT;
- };
- public NodeId getNodeId() {
- return NodeId.newInstance("ahost", 1234);
- };
+ private Context distContext =
+ new NMContext(new NMContainerTokenSecretManager(conf),
+ new NMTokenSecretManagerInNM(), null,
+ new ApplicationACLsManager(conf), new NMNullStateStoreService(),
+ false, conf) {
+ public int getHttpPort() {
+ return HTTP_PORT;
+ };
+ public NodeId getNodeId() {
+ return NodeId.newInstance("ahost", 1234);
+ };
};
public TestContainerLaunch() throws UnsupportedFileSystemException {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/TestNMTimelinePublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/TestNMTimelinePublisher.java
index 4aa28d2dbe..ae9397a78e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/TestNMTimelinePublisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/TestNMTimelinePublisher.java
@@ -55,8 +55,8 @@ public void testContainerResourceUsage() {
when(context.getHttpPort()).thenReturn(0);
NMTimelinePublisher publisher = new NMTimelinePublisher(context) {
public void createTimelineClient(ApplicationId appId) {
- if (!appToClientMap.containsKey(appId)) {
- appToClientMap.put(appId, timelineClient);
+ if (!getAppToClientMap().containsKey(appId)) {
+ getAppToClientMap().put(appId, timelineClient);
}
}
};
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java
index c98304001a..8feca21ccc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java
@@ -24,7 +24,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
@@ -40,10 +39,9 @@ public class MockApp implements Application {
Map containers = new HashMap();
ApplicationState appState;
Application app;
- String flowName;
- String flowVersion;
- long flowRunId;
- TimelineClient timelineClient = null;
+ private String flowName;
+ private String flowVersion;
+ private long flowRunId;
public MockApp(int uniqId) {
this("mockUser", 1234, uniqId);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
index e1e3298fda..1e702de934 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
@@ -373,7 +373,8 @@ public RMApplicationHistoryWriter getRMApplicationHistoryWriter() {
@Override
public void setRMTimelineCollectorManager(
RMTimelineCollectorManager timelineCollectorManager) {
- activeServiceContext.setRMTimelineCollectorManager(timelineCollectorManager);
+ activeServiceContext.setRMTimelineCollectorManager(
+ timelineCollectorManager);
}
@Override
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
index 61e7d650ca..7f4ed33f58 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
@@ -311,7 +311,7 @@ public void containerFinished(RMContainer container, long finishedTime) {
entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO,
container.getAllocatedNode().getHost());
entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO,
- container.getAllocatedNode().getPort());
+ container.getAllocatedNode().getPort());
entity.setOtherInfo(entityInfo);
tEvent.setEventInfo(eventInfo);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java
index 0da395a291..3ea4714976 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java
@@ -76,9 +76,9 @@
public class TestSystemMetricsPublisherForV2 {
/**
- * is the folder where the FileSystemTimelineWriterImpl writes the entities
+ * The folder where the FileSystemTimelineWriterImpl writes the entities.
*/
- protected static File testRootDir = new File("target",
+ private static File testRootDir = new File("target",
TestSystemMetricsPublisherForV2.class.getName() + "-localDir")
.getAbsoluteFile();
@@ -151,7 +151,8 @@ private static Configuration getTimelineV2Conf() {
} catch (IOException e) {
e.printStackTrace();
Assert
- .fail("Exception while setting the TIMELINE_SERVICE_STORAGE_DIR_ROOT ");
+ .fail("Exception while setting the " +
+ "TIMELINE_SERVICE_STORAGE_DIR_ROOT ");
}
return conf;
}
@@ -159,30 +160,30 @@ private static Configuration getTimelineV2Conf() {
@Test
public void testSystemMetricPublisherInitialization() {
@SuppressWarnings("resource")
- TimelineServiceV2Publisher metricsPublisher =
+ TimelineServiceV2Publisher publisher =
new TimelineServiceV2Publisher(mock(RMContext.class));
try {
Configuration conf = getTimelineV2Conf();
conf.setBoolean(YarnConfiguration.RM_PUBLISH_CONTAINER_EVENTS_ENABLED,
YarnConfiguration.DEFAULT_RM_PUBLISH_CONTAINER_EVENTS_ENABLED);
- metricsPublisher.init(conf);
+ publisher.init(conf);
assertFalse(
"Default configuration should not publish container events from RM",
- metricsPublisher.isPublishContainerEvents());
+ publisher.isPublishContainerEvents());
- metricsPublisher.stop();
+ publisher.stop();
- metricsPublisher = new TimelineServiceV2Publisher(mock(RMContext.class));
+ publisher = new TimelineServiceV2Publisher(mock(RMContext.class));
conf = getTimelineV2Conf();
- metricsPublisher.init(conf);
+ publisher.init(conf);
assertTrue("Expected to have registered event handlers and set ready to "
+ "publish events after init",
- metricsPublisher.isPublishContainerEvents());
- metricsPublisher.start();
+ publisher.isPublishContainerEvents());
+ publisher.start();
assertTrue("Expected to publish container events from RM",
- metricsPublisher.isPublishContainerEvents());
+ publisher.isPublishContainerEvents());
} finally {
- metricsPublisher.stop();
+ publisher.stop();
}
}
@@ -243,7 +244,7 @@ public void testPublishAppAttemptMetrics() throws Exception {
+ FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
File appFile = new File(outputDirApp, timelineServiceFileName);
Assert.assertTrue(appFile.exists());
- verifyEntity(appFile,2, AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE);
+ verifyEntity(appFile, 2, AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE);
}
@Test(timeout = 10000)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java
index 3a5c797af0..5a63547e2f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java
@@ -134,7 +134,8 @@ public void testPutExtendedEntities() throws Exception {
ApplicationEntity app = new ApplicationEntity();
app.setId(appId.toString());
flow.addChild(app.getType(), app.getId());
- ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
+ ApplicationAttemptId attemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
ApplicationAttemptEntity appAttempt = new ApplicationAttemptEntity();
appAttempt.setId(attemptId.toString());
ContainerId containerId = ContainerId.newContainerId(attemptId, 1);
@@ -144,8 +145,10 @@ public void testPutExtendedEntities() throws Exception {
user.setId(UserGroupInformation.getCurrentUser().getShortUserName());
QueueEntity queue = new QueueEntity();
queue.setId("default_queue");
- client.putEntities(cluster, flow, app, appAttempt, container, user, queue);
- client.putEntitiesAsync(cluster, flow, app, appAttempt, container, user, queue);
+ client.putEntities(cluster, flow, app, appAttempt, container, user,
+ queue);
+ client.putEntitiesAsync(cluster, flow, app, appAttempt, container, user,
+ queue);
} finally {
client.stop();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index 9af920553c..b1854488f8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -256,17 +256,17 @@ private static void loadData() throws Exception {
entity5.addEvent(event54);
Map> isRelatedTo1 = new HashMap>();
isRelatedTo1.put("type2",
- Sets.newHashSet("entity21","entity22","entity23","entity24"));
- isRelatedTo1.put("type4", Sets.newHashSet("entity41","entity42"));
- isRelatedTo1.put("type1", Sets.newHashSet("entity14","entity15"));
+ Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
+ isRelatedTo1.put("type4", Sets.newHashSet("entity41", "entity42"));
+ isRelatedTo1.put("type1", Sets.newHashSet("entity14", "entity15"));
isRelatedTo1.put("type3",
Sets.newHashSet("entity31", "entity35", "entity32", "entity33"));
entity5.addIsRelatedToEntities(isRelatedTo1);
Map> relatesTo1 = new HashMap>();
relatesTo1.put("type2",
- Sets.newHashSet("entity21","entity22","entity23","entity24"));
- relatesTo1.put("type4", Sets.newHashSet("entity41","entity42"));
- relatesTo1.put("type1", Sets.newHashSet("entity14","entity15"));
+ Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
+ relatesTo1.put("type4", Sets.newHashSet("entity41", "entity42"));
+ relatesTo1.put("type1", Sets.newHashSet("entity14", "entity15"));
relatesTo1.put("type3",
Sets.newHashSet("entity31", "entity35", "entity32", "entity33"));
entity5.addRelatesToEntities(relatesTo1);
@@ -317,16 +317,16 @@ private static void loadData() throws Exception {
entity6.addEvent(event64);
Map> isRelatedTo2 = new HashMap>();
isRelatedTo2.put("type2",
- Sets.newHashSet("entity21","entity22","entity23","entity24"));
- isRelatedTo2.put("type5", Sets.newHashSet("entity51","entity52"));
- isRelatedTo2.put("type6", Sets.newHashSet("entity61","entity66"));
+ Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
+ isRelatedTo2.put("type5", Sets.newHashSet("entity51", "entity52"));
+ isRelatedTo2.put("type6", Sets.newHashSet("entity61", "entity66"));
isRelatedTo2.put("type3", Sets.newHashSet("entity31"));
entity6.addIsRelatedToEntities(isRelatedTo2);
Map> relatesTo2 = new HashMap>();
relatesTo2.put("type2",
- Sets.newHashSet("entity21","entity22","entity23","entity24"));
- relatesTo2.put("type5", Sets.newHashSet("entity51","entity52"));
- relatesTo2.put("type6", Sets.newHashSet("entity61","entity66"));
+ Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
+ relatesTo2.put("type5", Sets.newHashSet("entity51", "entity52"));
+ relatesTo2.put("type6", Sets.newHashSet("entity61", "entity66"));
relatesTo2.put("type3", Sets.newHashSet("entity31"));
entity6.addRelatesToEntities(relatesTo2);
te5.addEntity(entity6);
@@ -391,10 +391,11 @@ private static ClientResponse getResponse(Client client, URI uri)
client.resource(uri).accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON).get(ClientResponse.class);
if (resp == null ||
- resp.getClientResponseStatus() != ClientResponse.Status.OK) {
+ resp.getStatusInfo().getStatusCode() !=
+ ClientResponse.Status.OK.getStatusCode()) {
String msg = "";
if (resp != null) {
- msg = resp.getClientResponseStatus().toString();
+ msg = String.valueOf(resp.getStatusInfo().getStatusCode());
}
throw new IOException("Incorrect response from timeline reader. " +
"Status=" + msg);
@@ -406,7 +407,8 @@ private static class DummyURLConnectionFactory
implements HttpURLConnectionFactory {
@Override
- public HttpURLConnection getHttpURLConnection(final URL url) throws IOException {
+ public HttpURLConnection getHttpURLConnection(final URL url)
+ throws IOException {
try {
return (HttpURLConnection)url.openConnection();
} catch (UndeclaredThrowableException e) {
@@ -422,10 +424,10 @@ private static TimelineEntity newEntity(String type, String id) {
}
private static TimelineMetric newMetric(TimelineMetric.Type type,
- String id, long ts, Number value) {
+ String id, long t, Number value) {
TimelineMetric metric = new TimelineMetric(type);
metric.setId(id);
- metric.addValue(ts, value);
+ metric.addValue(t, value);
return metric;
}
@@ -463,7 +465,7 @@ private static void verifyHttpResponse(Client client, URI uri,
.type(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertNotNull(resp);
assertTrue("Response from server should have been " + status,
- resp.getClientResponseStatus().equals(status));
+ resp.getStatusInfo().getStatusCode() == status.getStatusCode());
System.out.println("Response is: " + resp.getEntity(String.class));
}
@@ -866,7 +868,7 @@ public void testUIDQueryWithAndWithoutFlowContextInfo() throws Exception {
String appUIDWithoutFlowInfo = "cluster1!application_1111111111_1111";
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/"+
"app-uid/" + appUIDWithoutFlowInfo);
- resp = getResponse(client, uri);;
+ resp = getResponse(client, uri);
TimelineEntity appEntity2 = resp.getEntity(TimelineEntity.class);
assertNotNull(appEntity2);
assertEquals(
@@ -893,7 +895,7 @@ public void testUIDQueryWithAndWithoutFlowContextInfo() throws Exception {
String entityUIDWithFlowInfo = appUIDWithFlowInfo + "!type1!entity1";
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/"+
"entity-uid/" + entityUIDWithFlowInfo);
- resp = getResponse(client, uri);;
+ resp = getResponse(client, uri);
TimelineEntity singleEntity1 = resp.getEntity(TimelineEntity.class);
assertNotNull(singleEntity1);
assertEquals("type1", singleEntity1.getType());
@@ -903,7 +905,7 @@ public void testUIDQueryWithAndWithoutFlowContextInfo() throws Exception {
appUIDWithoutFlowInfo + "!type1!entity1";
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/"+
"entity-uid/" + entityUIDWithoutFlowInfo);
- resp = getResponse(client, uri);;
+ resp = getResponse(client, uri);
TimelineEntity singleEntity2 = resp.getEntity(TimelineEntity.class);
assertNotNull(singleEntity2);
assertEquals("type1", singleEntity2.getType());
@@ -1054,12 +1056,12 @@ public void testGetApp() throws Exception {
assertEquals("application_1111111111_2222", entity.getId());
assertEquals(1, entity.getMetrics().size());
TimelineMetric m4 = newMetric(TimelineMetric.Type.SINGLE_VALUE,
- "MAP_SLOT_MILLIS", ts - 80000, 101L);
+ "MAP_SLOT_MILLIS", ts - 80000, 101L);
for (TimelineMetric metric : entity.getMetrics()) {
assertTrue(verifyMetrics(metric, m4));
}
} finally {
- client.destroy();
+ client.destroy();
}
}
@@ -1425,7 +1427,8 @@ public void testGetEntitiesInfoFilters() throws Exception {
uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
"timeline/clusters/cluster1/apps/application_1111111111_1111/" +
"entities/type1?infofilters=(info1%20eq%20cluster1%20AND%20info4%20" +
- "eq%2035000)%20OR%20(info1%20eq%20cluster2%20AND%20info2%20eq%202.0)");
+ "eq%2035000)%20OR%20(info1%20eq%20cluster2%20AND%20info2%20eq%202.0" +
+ ")");
resp = getResponse(client, uri);
entities = resp.getEntity(new GenericType>(){});
assertNotNull(entities);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
index fd5a7f523f..a8de759bce 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
@@ -444,17 +444,17 @@ private static void loadEntities() throws IOException {
te.addEntity(entity2);
HBaseTimelineWriterImpl hbi = null;
try {
- hbi = new HBaseTimelineWriterImpl(util.getConfiguration());
- hbi.init(util.getConfiguration());
- hbi.start();
- String cluster = "cluster1";
- String user = "user1";
- String flow = "some_flow_name";
- String flowVersion = "AB7822C10F1111";
- long runid = 1002345678919L;
- String appName = "application_1231111111_1111";
- hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
- hbi.stop();
+ hbi = new HBaseTimelineWriterImpl(util.getConfiguration());
+ hbi.init(util.getConfiguration());
+ hbi.start();
+ String cluster = "cluster1";
+ String user = "user1";
+ String flow = "some_flow_name";
+ String flowVersion = "AB7822C10F1111";
+ long runid = 1002345678919L;
+ String appName = "application_1231111111_1111";
+ hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
+ hbi.stop();
} finally {
if (hbi != null) {
hbi.stop();
@@ -531,7 +531,7 @@ public void testWriteNullApplicationToHBase() throws Exception {
int count = 0;
for (Result rr = resultScanner.next(); rr != null;
rr = resultScanner.next()) {
- count++;
+ count++;
}
// there should be no rows written
// no exceptions thrown during write
@@ -1173,7 +1173,7 @@ public void testEvents() throws IOException {
for (TimelineEvent e : events) {
assertEquals(eventId, e.getId());
assertEquals(expTs, Long.valueOf(e.getTimestamp()));
- Map info = e.getInfo();
+ Map info = e.getInfo();
assertEquals(1, info.size());
for (Map.Entry infoEntry : info.entrySet()) {
assertEquals(expKey, infoEntry.getKey());
@@ -1249,7 +1249,7 @@ public void testEventsWithEmptyInfo() throws IOException {
// the qualifier is a compound key
// hence match individual values
assertEquals(eventId, eventColumnName.getId());
- assertEquals(expTs,eventColumnName.getTimestamp());
+ assertEquals(expTs, eventColumnName.getTimestamp());
// key must be empty
assertNull(eventColumnName.getInfoKey());
Object value = e.getValue();
@@ -1280,7 +1280,7 @@ public void testEventsWithEmptyInfo() throws IOException {
for (TimelineEvent e : events) {
assertEquals(eventId, e.getId());
assertEquals(expTs, Long.valueOf(e.getTimestamp()));
- Map info = e.getInfo();
+ Map info = e.getInfo();
assertTrue(info == null || info.isEmpty());
}
} finally {
@@ -1337,7 +1337,7 @@ public void testEventsEscapeTs() throws IOException {
for (TimelineEvent e : events) {
assertEquals(eventId, e.getId());
assertEquals(expTs, e.getTimestamp());
- Map info = e.getInfo();
+ Map info = e.getInfo();
assertEquals(1, info.size());
for (Map.Entry infoEntry : info.entrySet()) {
assertEquals(expKey, infoEntry.getKey());
@@ -1417,14 +1417,14 @@ public void testNonIntegralMetricValues() throws IOException {
public void testReadEntities() throws Exception {
TimelineEntity entity = reader.getEntity(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
- 1002345678919L, "application_1231111111_1111","world", "hello"),
+ 1002345678919L, "application_1231111111_1111", "world", "hello"),
new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertNotNull(entity);
assertEquals(3, entity.getConfigs().size());
assertEquals(1, entity.getIsRelatedToEntities().size());
Set entities = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
- 1002345678919L, "application_1231111111_1111","world",
+ 1002345678919L, "application_1231111111_1111", "world",
null), new TimelineEntityFilters(),
new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
assertEquals(3, entities.size());
@@ -1460,7 +1460,7 @@ public void testReadEntities() throws Exception {
public void testFilterEntitiesByCreatedTime() throws Exception {
Set entities = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
- 1002345678919L, "application_1231111111_1111","world", null),
+ 1002345678919L, "application_1231111111_1111", "world", null),
new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null,
null, null, null, null, null), new TimelineDataToRetrieve());
assertEquals(3, entities.size());
@@ -1468,12 +1468,12 @@ public void testFilterEntitiesByCreatedTime() throws Exception {
if (!entity.getId().equals("hello") && !entity.getId().equals("hello1") &&
!entity.getId().equals("hello2")) {
Assert.fail("Entities with ids' hello, hello1 and hello2 should be" +
- " present");
+ " present");
}
}
entities = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
- 1002345678919L, "application_1231111111_1111","world", null),
+ 1002345678919L, "application_1231111111_1111", "world", null),
new TimelineEntityFilters(null, 1425016502015L, null, null, null, null,
null, null, null), new TimelineDataToRetrieve());
assertEquals(2, entities.size());
@@ -1485,15 +1485,15 @@ public void testFilterEntitiesByCreatedTime() throws Exception {
}
entities = reader.getEntities(
new TimelineReaderContext("cluster1", "user1", "some_flow_name",
- 1002345678919L, "application_1231111111_1111","world", null),
+ 1002345678919L, "application_1231111111_1111", "world", null),
new TimelineEntityFilters(null, null, 1425016502015L, null, null, null,
null, null, null), new TimelineDataToRetrieve());
- assertEquals(1, entities.size());
- for (TimelineEntity entity : entities) {
- if (!entity.getId().equals("hello")) {
- Assert.fail("Entity with id hello should be present");
- }
- }
+ assertEquals(1, entities.size());
+ for (TimelineEntity entity : entities) {
+ if (!entity.getId().equals("hello")) {
+ Assert.fail("Entity with id hello should be present");
+ }
+ }
}
@Test
@@ -1518,7 +1518,7 @@ public void testReadEntitiesRelationsAndEventFiltersDefaultView()
new HashSet