From 6cf6ab7b780de2b0c2c9ea730e1f366965a0d682 Mon Sep 17 00:00:00 2001
From: Sangjin Lee
Date: Sun, 10 Jul 2016 08:38:19 -0700
Subject: [PATCH] Made a number of miscellaneous fixes for javac, javadoc, and checkstyle warnings.

---
 .../jobhistory/JobHistoryEventHandler.java | 4 -
 .../v2/app/rm/RMContainerAllocator.java | 2 +-
 .../TestJobHistoryEventHandler.java | 3 +-
 .../mapreduce/jobhistory/HistoryEvent.java | 12 +-
 .../mapred/TestMRTimelineEventHandling.java | 24 ++-
 .../apache/hadoop/mapred/UtilsForTests.java | 4 +-
 .../hadoop/mapreduce/EntityWriterV2.java | 3 +-
 .../JobHistoryFileReplayMapperV2.java | 4 +-
 .../SimpleEntityWriterConstants.java | 14 +-
 .../mapreduce/SimpleEntityWriterV1.java | 6 +-
 .../mapreduce/TimelineEntityConverterV1.java | 10 +-
 .../mapreduce/TimelineEntityConverterV2.java | 11 +-
 .../mapreduce/TimelineServicePerformance.java | 3 +-
 .../mapreduce/v2/MiniMRYarnCluster.java | 5 +-
 .../records/timelineservice/package-info.java | 1 -
 .../hadoop/yarn/conf/YarnConfiguration.java | 7 +-
 .../yarn/util/TimelineServiceHelper.java | 2 +
 .../distributedshell/ApplicationMaster.java | 27 ++-
 .../TestDistributedShell.java | 23 ++-
 .../TestDistributedShellWithNodeLabels.java | 2 -
 .../hadoop/yarn/client/api/AMRMClient.java | 10 +-
 .../client/api/async/AMRMClientAsync.java | 4 +-
 .../yarn/client/api/TimelineClient.java | 34 ++-
 .../client/api/impl/TimelineClientImpl.java | 42 ++--
 .../TestTimelineServiceRecords.java | 15 +-
 .../api/impl/TestTimelineClientV2Impl.java | 37 ++--
 .../yarn/util/TestTimelineServiceHelper.java | 21 +-
 .../api/CollectorNodemanagerProtocol.java | 12 +-
 .../api/CollectorNodemanagerProtocolPB.java | 3 +-
 ...lectorNodemanagerProtocolPBClientImpl.java | 4 +-
 ...ectorNodemanagerProtocolPBServiceImpl.java | 6 +-
 ...TimelineCollectorContextRequestPBImpl.java | 25 ++-
 ...imelineCollectorContextResponsePBImpl.java | 24 ++-
 .../impl/pb/NodeHeartbeatRequestPBImpl.java | 9 +-
 .../impl/pb/NodeHeartbeatResponsePBImpl.java | 6 +-
 .../ReportNewCollectorInfoRequestPBImpl.java | 16 +-
 .../ReportNewCollectorInfoResponsePBImpl.java | 12 +-
 .../impl/pb/AppCollectorsMapPBImpl.java | 17 +-
 .../java/org/apache/hadoop/yarn/TestRPC.java | 55 +++--
 .../yarn/server/nodemanager/NodeManager.java | 4 +-
 .../nodemanager/NodeStatusUpdaterImpl.java | 20 +-
 .../ContainerManagerImpl.java | 8 +-
 .../ApplicationContainerFinishedEvent.java | 3 +-
 .../container/ContainerImpl.java | 2 -
 .../monitor/ContainersMonitorImpl.java | 5 +-
 .../timelineservice/NMTimelinePublisher.java | 11 +-
 .../nodemanager/TestNodeStatusUpdater.java | 5 +-
 .../application/TestApplication.java | 5 +-
 .../launcher/TestContainerLaunch.java | 22 +-
 .../TestNMTimelinePublisher.java | 4 +-
 .../server/nodemanager/webapp/MockApp.java | 8 +-
 .../server/resourcemanager/RMContextImpl.java | 3 +-
 .../metrics/TimelineServiceV1Publisher.java | 2 +-
 .../TestSystemMetricsPublisherForV2.java | 29 +--
 .../TestTimelineServiceClientIntegration.java | 9 +-
 ...TimelineReaderWebServicesHBaseStorage.java | 51 ++---
 .../storage/TestHBaseTimelineStorage.java | 192 +++++++++---------
 ...stPhoenixOfflineAggregationWriterImpl.java | 4 +-
 .../storage/flow/TestFlowDataGenerator.java | 39 ++--
 .../flow/TestHBaseStorageFlowActivity.java | 13 +-
 .../storage/flow/TestHBaseStorageFlowRun.java | 10 +-
 .../TestHBaseStorageFlowRunCompaction.java | 33 +--
 .../TimelineCollectorWebService.java | 3 +-
 .../storage/application/package-info.java | 1 -
 .../storage/common/AppIdKeyConverter.java | 3 +-
 .../storage/common/TimestampGenerator.java | 3 +-
.../storage/entity/package-info.java | 1 - .../TestNMTimelineCollectorManager.java | 24 +-- ...stPerNodeTimelineCollectorsAuxService.java | 16 +- .../reader/TestTimelineReaderUtils.java | 6 +- .../reader/TestTimelineReaderWebServices.java | 11 +- .../TestTimelineReaderWebServicesUtils.java | 4 +- .../reader/TestTimelineUIDConverter.java | 2 +- .../TestFileSystemTimelineReaderImpl.java | 44 ++-- .../TestFileSystemTimelineWriterImpl.java | 7 +- .../storage/common/TestRowKeys.java | 22 +- .../storage/common/TestSeparator.java | 28 +-- .../src/site/markdown/TimelineServiceV2.md | 1 - 78 files changed, 634 insertions(+), 543 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java index 9b59676641..817cd14593 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java @@ -46,7 +46,6 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.TaskStatus; import org.apache.hadoop.mapreduce.Counter; -import org.apache.hadoop.mapreduce.CounterGroup; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.MRJobConfig; @@ -76,11 +75,8 @@ import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; -import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.node.ArrayNode; import org.codehaus.jackson.node.JsonNodeFactory; -import org.codehaus.jackson.node.ObjectNode; import com.google.common.annotations.VisibleForTesting; import com.sun.jersey.api.client.ClientHandlerException; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java index ea2046b319..217337ef6f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java @@ -829,7 +829,7 @@ private List getResources() throws Exception { if (collectorAddr != null && !collectorAddr.isEmpty() && appContext.getTimelineClient() != null) { appContext.getTimelineClient().setTimelineServiceAddress( - response.getCollectorAddr()); + response.getCollectorAddr()); } for (ContainerStatus cont : finishedContainers) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java index a84e6d21e6..064f9ec29f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java @@ -786,7 +786,8 @@ public TestParams() { public TestParams(boolean isLastAMRetry) { this(AppContext.class, isLastAMRetry); } - public TestParams(Class contextClass, boolean isLastAMRetry) { + public TestParams(Class contextClass, + boolean isLastAMRetry) { this.isLastAMRetry = isLastAMRetry; mockAppContext = mockAppContext(contextClass, appId, this.isLastAMRetry); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java index 1d59ebef89..1ba7195ec2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/HistoryEvent.java @@ -42,9 +42,17 @@ public interface HistoryEvent { /** Set the Avro datum wrapped by this. */ void setDatum(Object datum); - /** Map HistoryEvent to TimelineEvent */ + /** + * Map HistoryEvent to TimelineEvent. + * + * @return the timeline event + */ TimelineEvent toTimelineEvent(); - /** Counters or Metrics if any else return null. */ + /** + * Counters or Metrics if any else return null. 
+ * + * @return the set of timeline metrics + */ Set getTimelineMetrics(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java index 6b9f27e2aa..90748a9002 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java @@ -64,7 +64,7 @@ public class TestMRTimelineEventHandling { private static final String TIMELINE_AUX_SERVICE_NAME = "timeline_collector"; private static final Log LOG = - LogFactory.getLog(TestMRTimelineEventHandling.class); + LogFactory.getLog(TestMRTimelineEventHandling.class); @Test public void testTimelineServiceStartInMiniCluster() throws Exception { @@ -169,6 +169,7 @@ public void testMRTimelineEventHandling() throws Exception { } } + @SuppressWarnings("deprecation") @Test public void testMRNewTimelineServiceEventHandling() throws Exception { LOG.info("testMRNewTimelineServiceEventHandling start."); @@ -183,7 +184,7 @@ public void testMRNewTimelineServiceEventHandling() throws Exception { // enable aux-service based timeline collectors conf.set(YarnConfiguration.NM_AUX_SERVICES, TIMELINE_AUX_SERVICE_NAME); conf.set(YarnConfiguration.NM_AUX_SERVICES + "." + TIMELINE_AUX_SERVICE_NAME - + ".class", PerNodeTimelineCollectorsAuxService.class.getName()); + + ".class", PerNodeTimelineCollectorsAuxService.class.getName()); conf.setBoolean(YarnConfiguration.SYSTEM_METRICS_PUBLISHER_ENABLED, true); @@ -245,7 +246,8 @@ public void testMRNewTimelineServiceEventHandling() throws Exception { } // Cleanup test file String testRoot = - FileSystemTimelineWriterImpl.DEFAULT_TIMELINE_SERVICE_STORAGE_DIR_ROOT; + FileSystemTimelineWriterImpl. + DEFAULT_TIMELINE_SERVICE_STORAGE_DIR_ROOT; File testRootFolder = new File(testRoot); if(testRootFolder.isDirectory()) { FileUtils.deleteDirectory(testRootFolder); @@ -320,8 +322,10 @@ private void checkNewTimelineEvent(ApplicationId appId, " does not exist.", taskFolder.isDirectory()); - String taskEventFileName = appId.toString().replaceAll("application", "task") - + "_m_000000" + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION; + String taskEventFileName = + appId.toString().replaceAll("application", "task") + + "_m_000000" + + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION; String taskEventFilePath = outputDirTask + taskEventFileName; File taskEventFile = new File(taskEventFilePath); @@ -372,10 +376,12 @@ private void verifyEntity(File entityFile, String eventId, reader = new BufferedReader(new FileReader(entityFile)); while ((strLine = reader.readLine()) != null) { if (strLine.trim().length() > 0) { - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity = - FileSystemTimelineReaderImpl.getTimelineRecordFromJSON( - strLine.trim(), - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.class); + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity + entity = + FileSystemTimelineReaderImpl.getTimelineRecordFromJSON( + strLine.trim(), + org.apache.hadoop.yarn.api.records.timelineservice. 
+ TimelineEntity.class); if (eventId == null) { // Job metrics are published without any events for // ApplicationEntity. There is also possibility that some other diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java index 2fb6828e92..935c175e70 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java @@ -621,8 +621,8 @@ static RunningJob runJob(JobConf conf, Path inDir, Path outDir, int numMaps, return job; } - public static void waitForAppFinished(RunningJob job, MiniMRYarnCluster cluster) - throws IOException { + public static void waitForAppFinished(RunningJob job, + MiniMRYarnCluster cluster) throws IOException { ApplicationId appId = ApplicationId.newInstance( Long.parseLong(job.getID().getJtIdentifier()), job.getID().getId()); ConcurrentMap rmApps = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java index f5d95c3a40..74d7b9476e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/EntityWriterV2.java @@ -33,7 +33,8 @@ * to the timeline service. 
*/ abstract class EntityWriterV2 - extends org.apache.hadoop.mapreduce.Mapper { + extends org.apache.hadoop.mapreduce.Mapper + { @Override public void map(IntWritable key, IntWritable val, Context context) throws IOException { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java index 6a9a878c73..2ec48336e7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV2.java @@ -89,8 +89,8 @@ protected void writeEntities(Configuration tlConf, parser.parseHistoryFile(job.getJobHistoryFilePath()); Configuration jobConf = parser.parseConfiguration(job.getJobConfFilePath()); - LOG.info("parsed the job history file and the configuration file for job" - + jobIdStr); + LOG.info("parsed the job history file and the configuration file " + + "for job " + jobIdStr); // set the context // flow id: job name, flow run id: timestamp, user id diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java index b89d0e879b..d96ad7615a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterConstants.java @@ -32,12 +32,12 @@ interface SimpleEntityWriterConstants { /** * To ensure that the compression really gets exercised, generate a - * random alphanumeric fixed length payload + * random alphanumeric fixed length payload. 
*/ - char[] ALPHA_NUMS = new char[] { 'a', 'b', 'c', 'd', 'e', 'f', - 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', - 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', - 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', - 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2', - '3', '4', '5', '6', '7', '8', '9', '0', ' ' }; + char[] ALPHA_NUMS = new char[] {'a', 'b', 'c', 'd', 'e', 'f', + 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', + 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', + 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', + 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2', + '3', '4', '5', '6', '7', '8', '9', '0', ' '}; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java index b10ae049f8..16d14a18c8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java @@ -39,11 +39,13 @@ * configuration. */ class SimpleEntityWriterV1 - extends org.apache.hadoop.mapreduce.Mapper + extends org.apache.hadoop.mapreduce.Mapper + implements SimpleEntityWriterConstants { private static final Log LOG = LogFactory.getLog(SimpleEntityWriterV1.class); - public void map(IntWritable key, IntWritable val, Context context) throws IOException { + public void map(IntWritable key, IntWritable val, Context context) + throws IOException { TimelineClient tlc = new TimelineClientImpl(); Configuration conf = context.getConfiguration(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java index 4d8b74bd7b..dcc3ce014b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV1.java @@ -90,9 +90,10 @@ private TimelineEntity createJobEntity(JobInfo jobInfo, Configuration conf) { return job; } - private Set createTaskAndTaskAttemptEntities(JobInfo jobInfo) { + private Set + createTaskAndTaskAttemptEntities(JobInfo jobInfo) { Set entities = new HashSet<>(); - Map taskInfoMap = jobInfo.getAllTasks(); + Map taskInfoMap = jobInfo.getAllTasks(); LOG.info("job " + jobInfo.getJobId()+ " has " + taskInfoMap.size() + " tasks"); for (TaskInfo taskInfo: taskInfoMap.values()) { @@ -124,7 +125,7 @@ private TimelineEntity createTaskEntity(TaskInfo taskInfo) { private Set createTaskAttemptEntities(TaskInfo taskInfo) { Set taskAttempts = new HashSet(); - Map taskAttemptInfoMap = + Map taskAttemptInfoMap = taskInfo.getAllTaskAttempts(); LOG.info("task " + taskInfo.getTaskId() + " has " + taskAttemptInfoMap.size() + " task attempts"); @@ -135,7 +136,8 @@ private Set createTaskAttemptEntities(TaskInfo 
taskInfo) { return taskAttempts; } - private TimelineEntity createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) { + private TimelineEntity + createTaskAttemptEntity(TaskAttemptInfo taskAttemptInfo) { TimelineEntity taskAttempt = new TimelineEntity(); taskAttempt.setEntityType(TASK_ATTEMPT); taskAttempt.setEntityId(taskAttemptInfo.getAttemptId().toString()); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java index 79633d2835..45812fe7da 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineEntityConverterV2.java @@ -27,11 +27,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapreduce.Counter; -import org.apache.hadoop.mapreduce.CounterGroup; -import org.apache.hadoop.mapreduce.Counters; -import org.apache.hadoop.mapreduce.TaskAttemptID; -import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo; @@ -109,7 +104,7 @@ private TimelineEntity createJobEntity(JobInfo jobInfo, Configuration conf) { } private void addConfiguration(TimelineEntity job, Configuration conf) { - for (Map.Entry e: conf) { + for (Map.Entry e: conf) { job.addConfig(e.getKey(), e.getValue()); } } @@ -130,7 +125,7 @@ private void addMetrics(TimelineEntity entity, Counters counters) { private List createTaskAndTaskAttemptEntities( JobInfo jobInfo) { List entities = new ArrayList<>(); - Map taskInfoMap = jobInfo.getAllTasks(); + Map taskInfoMap = jobInfo.getAllTasks(); LOG.info("job " + jobInfo.getJobId()+ " has " + taskInfoMap.size() + " tasks"); for (TaskInfo taskInfo: taskInfoMap.values()) { @@ -167,7 +162,7 @@ private TimelineEntity createTaskEntity(TaskInfo taskInfo) { private Set createTaskAttemptEntities(TaskInfo taskInfo) { Set taskAttempts = new HashSet(); - Map taskAttemptInfoMap = + Map taskAttemptInfoMap = taskInfo.getAllTaskAttempts(); LOG.info("task " + taskInfo.getTaskId() + " has " + taskAttemptInfoMap.size() + " task attempts"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java index 1a14137bd4..7fa0444840 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TimelineServicePerformance.java @@ -137,7 +137,8 @@ public static boolean parseArgs(String[] args, Job job) throws IOException { default: // use the current timestamp as the "run id" of the test: 
this will // be used as simulating the cluster timestamp for apps - conf.setLong(SimpleEntityWriterConstants.TIMELINE_SERVICE_PERFORMANCE_RUN_ID, + conf.setLong( + SimpleEntityWriterConstants.TIMELINE_SERVICE_PERFORMANCE_RUN_ID, System.currentTimeMillis()); switch (timeline_service_version) { case TIMELINE_SERVICE_VERSION_2: diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java index edb825de33..2d3d6ed9e4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java @@ -181,10 +181,11 @@ public void serviceInit(Configuration conf) throws Exception { } if (enableTimelineAuxService) { conf.setStrings(YarnConfiguration.NM_AUX_SERVICES, - new String[] { ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID, TIMELINE_AUX_SERVICE_NAME }); + new String[] {ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID, + TIMELINE_AUX_SERVICE_NAME}); } else { conf.setStrings(YarnConfiguration.NM_AUX_SERVICES, - new String[] { ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID }); + new String[] {ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID}); } conf.setClass(String.format(YarnConfiguration.NM_AUX_SERVICE_FMT, ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID), ShuffleHandler.class, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java index c43bd62466..43805c8c98 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/package-info.java @@ -24,4 +24,3 @@ package org.apache.hadoop.yarn.api.records.timelineservice; import org.apache.hadoop.classification.InterfaceAudience; - diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index 3787ff6364..3bb73f51fa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -500,7 +500,8 @@ public static boolean isAclEnabled(Configuration conf) { /** * The setting that controls whether yarn system metrics is published on the - * timeline server or not by RM and NM. This configuration setting is for ATS V2 + * timeline server or not by RM and NM. This configuration setting is for + * ATS v2. 
*/ public static final String SYSTEM_METRICS_PUBLISHER_ENABLED = YARN_PREFIX + "system-metrics-publisher.enabled"; @@ -840,7 +841,7 @@ public static boolean isAclEnabled(Configuration conf) { /** Number of threads container manager uses.*/ public static final String NM_COLLECTOR_SERVICE_THREAD_COUNT = - NM_PREFIX + "collector-service.thread-count"; + NM_PREFIX + "collector-service.thread-count"; public static final int DEFAULT_NM_COLLECTOR_SERVICE_THREAD_COUNT = 5; /** Number of threads used in cleanup.*/ @@ -872,7 +873,7 @@ public static boolean isAclEnabled(Configuration conf) { /** Address where the collector service IPC is.*/ public static final String NM_COLLECTOR_SERVICE_ADDRESS = - NM_PREFIX + "collector-service.address"; + NM_PREFIX + "collector-service.address"; public static final int DEFAULT_NM_COLLECTOR_SERVICE_PORT = 8048; public static final String DEFAULT_NM_COLLECTOR_SERVICE_ADDRESS = "0.0.0.0:" + DEFAULT_NM_LOCALIZER_PORT; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java index ff6ebbd712..e0268a67b8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/TimelineServiceHelper.java @@ -36,6 +36,8 @@ private TimelineServiceHelper() { /** * Cast map to HashMap for generic type. * @param originalMap the map need to be casted + * @param key type + * @param value type * @return casted HashMap object */ public static HashMap mapCastToHashMap( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java index 907d09ecb8..b9949e1aed 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java @@ -102,7 +102,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; -import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.timeline.TimelineUtils; import org.apache.log4j.LogManager; @@ -747,7 +746,7 @@ protected boolean finish() { DSEvent.DS_APP_ATTEMPT_END); } else { publishApplicationAttemptEvent(timelineClient, appAttemptID.toString(), - DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi); + DSEvent.DS_APP_ATTEMPT_END, domainId, appSubmitterUgi); } } @@ -858,7 +857,7 @@ public void onContainersCompleted(List completedContainers) { publishContainerEndEventOnTimelineServiceV2(containerStatus); } else { publishContainerEndEvent( - timelineClient, containerStatus, domainId, appSubmitterUgi); + timelineClient, containerStatus, domainId, appSubmitterUgi); } } } @@ -988,8 +987,8 @@ public void onContainerStarted(ContainerId 
containerId, container); } else { applicationMaster.publishContainerStartEvent( - applicationMaster.timelineClient, container, - applicationMaster.domainId, applicationMaster.appSubmitterUgi); + applicationMaster.timelineClient, container, + applicationMaster.domainId, applicationMaster.appSubmitterUgi); } } } @@ -1348,8 +1347,10 @@ Thread createLaunchContainerThread(Container allocatedContainer, private void publishContainerStartEventOnTimelineServiceV2( Container container) { - final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity(); + final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity + entity = + new org.apache.hadoop.yarn.api.records.timelineservice. + TimelineEntity(); entity.setId(container.getId().toString()); entity.setType(DSEntity.DS_CONTAINER.toString()); long ts = System.currentTimeMillis(); @@ -1381,8 +1382,10 @@ public TimelinePutResponse run() throws Exception { private void publishContainerEndEventOnTimelineServiceV2( final ContainerStatus container) { - final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity(); + final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity + entity = + new org.apache.hadoop.yarn.api.records.timelineservice. + TimelineEntity(); entity.setId(container.getContainerId().toString()); entity.setType(DSEntity.DS_CONTAINER.toString()); //entity.setDomainId(domainId); @@ -1412,8 +1415,10 @@ public TimelinePutResponse run() throws Exception { private void publishApplicationAttemptEventOnTimelineServiceV2( DSEvent appEvent) { - final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity(); + final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity + entity = + new org.apache.hadoop.yarn.api.records.timelineservice. 
+ TimelineEntity(); entity.setId(appAttemptID.toString()); entity.setType(DSEntity.DS_APP_ATTEMPT.toString()); long ts = System.currentTimeMillis(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java index a01d21b611..b9b8c7f79b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java @@ -153,7 +153,8 @@ private void setupInternal(int numNodeManager, float timelineVersion) ProcfsBasedProcessTree.class.getName()); conf.setBoolean(YarnConfiguration.NM_PMEM_CHECK_ENABLED, true); conf.setBoolean(YarnConfiguration.NM_VMEM_CHECK_ENABLED, true); - conf.setBoolean(YarnConfiguration.YARN_MINICLUSTER_CONTROL_RESOURCE_MONITORING, + conf.setBoolean( + YarnConfiguration.YARN_MINICLUSTER_CONTROL_RESOURCE_MONITORING, true); conf.setBoolean(YarnConfiguration.RM_SYSTEM_METRICS_PUBLISHER_ENABLED, true); @@ -180,11 +181,13 @@ private void setupInternal(int numNodeManager, float timelineVersion) // disable v1 timeline server since we no longer have a server here // enable aux-service based timeline aggregators conf.set(YarnConfiguration.NM_AUX_SERVICES, TIMELINE_AUX_SERVICE_NAME); - conf.set(YarnConfiguration.NM_AUX_SERVICES + "." + TIMELINE_AUX_SERVICE_NAME - + ".class", PerNodeTimelineCollectorsAuxService.class.getName()); + conf.set(YarnConfiguration.NM_AUX_SERVICES + "." + + TIMELINE_AUX_SERVICE_NAME + ".class", + PerNodeTimelineCollectorsAuxService.class.getName()); conf.setClass(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS, FileSystemTimelineWriterImpl.class, - org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter.class); + org.apache.hadoop.yarn.server.timelineservice.storage. 
+ TimelineWriter.class); } else { Assert.fail("Wrong timeline version number: " + timelineVersion); } @@ -395,7 +398,8 @@ public void run() { } if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED - && appReport.getFinalApplicationStatus() != FinalApplicationStatus.UNDEFINED) { + && appReport.getFinalApplicationStatus() != + FinalApplicationStatus.UNDEFINED) { break; } } @@ -431,7 +435,7 @@ public void run() { } private void checkTimelineV1(boolean haveDomain) throws Exception { - TimelineDomain domain = null; + TimelineDomain domain = null; if (haveDomain) { domain = yarnCluster.getApplicationHistoryServer() .getTimelineStore().getDomain("TEST_DOMAIN"); @@ -545,7 +549,7 @@ private void checkTimelineV2(boolean haveDomain, ApplicationId appId, if (numOfContainerFinishedOccurences > 0) { break; } else { - Thread.sleep(500l); + Thread.sleep(500L); } } Assert.assertEquals( @@ -577,7 +581,7 @@ private void checkTimelineV2(boolean haveDomain, ApplicationId appId, if (numOfStringOccurences > 0) { break; } else { - Thread.sleep(500l); + Thread.sleep(500L); } } Assert.assertEquals( @@ -631,8 +635,9 @@ private long getNumOfStringOccurences(File entityFile, String searchString) try { reader = new BufferedReader(new FileReader(entityFile)); while ((strLine = reader.readLine()) != null) { - if (strLine.trim().contains(searchString)) + if (strLine.trim().contains(searchString)) { actualCount++; + } } } finally { reader.close(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java index c55f20216b..c651d32cfa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShellWithNodeLabels.java @@ -30,9 +30,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.junit.Assert; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.TestName; import com.google.common.collect.ImmutableMap; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java index 7f64dd52f9..7acaf11191 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/AMRMClient.java @@ -463,15 +463,15 @@ public NMTokenCache getNMTokenCache() { /** * Register TimelineClient to AMRMClient. 
- * @param timelineClient + * @param client the timeline client to register */ - public void registerTimelineClient(TimelineClient timelineClient) { - this.timelineClient = timelineClient; + public void registerTimelineClient(TimelineClient client) { + this.timelineClient = client; } /** * Get registered timeline client. - * @return + * @return the registered timeline client */ public TimelineClient getRegisteredTimeineClient() { return this.timelineClient; @@ -481,7 +481,7 @@ public TimelineClient getRegisteredTimeineClient() { * Wait for check to return true for each 1000 ms. * See also {@link #waitFor(com.google.common.base.Supplier, int)} * and {@link #waitFor(com.google.common.base.Supplier, int, int)} - * @param check + * @param check the condition for which it should wait */ public void waitFor(Supplier check) throws InterruptedException { waitFor(check, 1000); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java index 7cdda1be25..28d20c8d43 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/AMRMClientAsync.java @@ -304,7 +304,7 @@ public void registerTimelineClient(TimelineClient timelineClient) { /** * Get registered timeline client. - * @return + * @return the registered timeline client */ public TimelineClient getRegisteredTimeineClient() { return client.getRegisteredTimeineClient(); @@ -325,7 +325,7 @@ public abstract void updateBlacklist(List blacklistAdditions, * Wait for check to return true for each 1000 ms. * See also {@link #waitFor(com.google.common.base.Supplier, int)} * and {@link #waitFor(com.google.common.base.Supplier, int, int)} - * @param check + * @param check the condition for which it should wait */ public void waitFor(Supplier check) throws InterruptedException { waitFor(check, 1000); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java index e043c2f2c5..cc76718dd0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java @@ -30,8 +30,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain; -import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain; -import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntityGroupId; import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; @@ -55,10 +53,12 @@ public abstract class TimelineClient extends AbstractService implements * construct and initialize a timeline client if the following operations are * supposed to be conducted by that user. 
*/ - protected ApplicationId contextAppId; + private ApplicationId contextAppId; /** * Creates an instance of the timeline v.1.x client. + * + * @return the created timeline client instance */ @Public public static TimelineClient createTimelineClient() { @@ -68,6 +68,10 @@ public static TimelineClient createTimelineClient() { /** * Creates an instance of the timeline v.2 client. + * + * @param appId the application id with which the timeline client is + * associated + * @return the created timeline client instance */ @Public public static TimelineClient createTimelineClient(ApplicationId appId) { @@ -91,8 +95,8 @@ protected TimelineClient(String name, ApplicationId appId) { * @param entities * the collection of {@link TimelineEntity} * @return the error information if the sent entities are not correctly stored - * @throws IOException - * @throws YarnException + * @throws IOException if there are I/O errors + * @throws YarnException if entities are incomplete/invalid */ @Public public abstract TimelinePutResponse putEntities( @@ -112,8 +116,8 @@ public abstract TimelinePutResponse putEntities( * @param entities * the collection of {@link TimelineEntity} * @return the error information if the sent entities are not correctly stored - * @throws IOException - * @throws YarnException + * @throws IOException if there are I/O errors + * @throws YarnException if entities are incomplete/invalid */ @Public public abstract TimelinePutResponse putEntities( @@ -212,15 +216,15 @@ public abstract void cancelDelegationToken( * for a non-v.2 timeline client instance, a YarnException is thrown. *

* - * @param entities - * the collection of {@link org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity} + * @param entities the collection of {@link + * org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity} * @throws IOException * @throws YarnException */ @Public public abstract void putEntities( - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities) - throws IOException, YarnException; + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... + entities) throws IOException, YarnException; /** *

@@ -230,15 +234,15 @@ public abstract void putEntities( * non-v.2 timeline client instance, a YarnException is thrown. *

* - * @param entities - * the collection of {@link org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity} + * @param entities the collection of {@link + * org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity} * @throws IOException * @throws YarnException */ @Public public abstract void putEntitiesAsync( - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities) - throws IOException, YarnException; + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... + entities) throws IOException, YarnException; /** *

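The hunks above reshape the v.2 surface of TimelineClient: a factory keyed by an ApplicationId, a blocking putEntities, and a fire-and-forget putEntitiesAsync. For orientation, a minimal usage sketch of that surface follows. It is illustrative only: the collector address and the entity type/id are invented, and in a real application master the address arrives through the RM allocate response, as the RMContainerAllocator hunk near the top shows.

    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
    import org.apache.hadoop.yarn.client.api.TimelineClient;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;

    public class TimelineV2ClientSketch {
      public static void main(String[] args) throws Exception {
        // Bind a v.2 client to an application, per createTimelineClient(appId).
        ApplicationId appId = ApplicationId.newInstance(0, 1);
        TimelineClient client = TimelineClient.createTimelineClient(appId);
        client.init(new YarnConfiguration());
        client.start();
        // Invented address; normally taken from the RM allocate response.
        client.setTimelineServiceAddress("localhost:12345");
        TimelineEntity entity = new TimelineEntity();
        entity.setType("SKETCH_ENTITY"); // invented entity type
        entity.setId("sketch_entity_000001"); // invented entity id
        client.putEntitiesAsync(entity); // queue and return immediately
        client.putEntities(entity); // block until the write completes
        client.stop();
      }
    }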
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java index 478efc4a31..4a5a443d02 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java @@ -391,8 +391,8 @@ public TimelinePutResponse putEntities( @Override public void putEntities( - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities) - throws IOException, YarnException { + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... + entities) throws IOException, YarnException { if (!timelineServiceV2) { throw new YarnException("v.2 method is invoked on a v.1.x client"); } @@ -401,8 +401,8 @@ public void putEntities( @Override public void putEntitiesAsync( - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... entities) - throws IOException, YarnException { + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity... + entities) throws IOException, YarnException { if (!timelineServiceV2) { throw new YarnException("v.2 method is invoked on a v.1.x client"); } @@ -494,7 +494,8 @@ protected void putObjects( throw new IOException(re); } if (resp == null || - resp.getClientResponseStatus() != ClientResponse.Status.OK) { + resp.getStatusInfo().getStatusCode() != + ClientResponse.Status.OK.getStatusCode()) { String msg = "Response from the timeline server is " + ((resp == null) ? "null": "not successful," + " HTTP error code: " + resp.getStatus() @@ -530,7 +531,8 @@ public Token run() // TODO we should add retry logic here if timelineServiceAddress is // not available immediately. 
return (Token) authUrl.getDelegationToken( - constructResURI(getConfig(), getTimelineServiceAddress(), false).toURL(), + constructResURI(getConfig(), + getTimelineServiceAddress(), false).toURL(), token, renewer, doAsUser); } }; @@ -911,17 +913,21 @@ public boolean shouldRetryOn(Exception e) { } private final class EntitiesHolder extends FutureTask { - private final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities entities; + private final + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities + entities; private final boolean isSync; EntitiesHolder( - final org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities entities, + final + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities + entities, final boolean isSync) { super(new Callable() { // publishEntities() public Void call() throws Exception { MultivaluedMap params = new MultivaluedMapImpl(); - params.add("appid", contextAppId.toString()); + params.add("appid", getContextAppId().toString()); params.add("async", Boolean.toString(!isSync)); putObjects("entities", params, entities); return null; @@ -935,7 +941,8 @@ public boolean isSync() { return isSync; } - public org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities getEntities() { + public org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities + getEntities() { return entities; } } @@ -947,7 +954,7 @@ public org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities getEn private class TimelineEntityDispatcher { /** * Time period for which the timelineclient will wait for draining after - * stop + * stop. */ private static final long DRAIN_TIME_PERIOD = 2000L; @@ -1063,17 +1070,20 @@ private void publishWithoutBlockingOnQueue( } public void dispatchEntities(boolean sync, - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity[] entitiesTobePublished) - throws YarnException { + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity[] + entitiesTobePublished) throws YarnException { if (executor.isShutdown()) { throw new YarnException("Timeline client is in the process of stopping," + " not accepting any more TimelineEntities"); } // wrap all TimelineEntity into TimelineEntities object - org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities entities = - new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities(); - for (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity : entitiesTobePublished) { + org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities + entities = + new org.apache.hadoop.yarn.api.records.timelineservice. 
+ TimelineEntities(); + for (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity + entity : entitiesTobePublished) { entities.addEntity(entity); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java index 592bfa3ad6..221969bdd6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/timelineservice/TestTimelineServiceRecords.java @@ -100,7 +100,8 @@ public void testTimelineEntities() throws Exception { } entity.addMetric(metric2); - TimelineMetric metric3 = new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE); + TimelineMetric metric3 = + new TimelineMetric(TimelineMetric.Type.SINGLE_VALUE); metric3.setId("test metric id 1"); metric3.addValue(4L, (short) 4); Assert.assertEquals("metric3 should equal to metric2! ", metric3, metric2); @@ -212,18 +213,22 @@ public void testFirstClassCitizenEntities() throws Exception { ApplicationAttemptId.newInstance( ApplicationId.newInstance(0, 1), 1), 1).toString()); - cluster.addChild(TimelineEntityType.YARN_FLOW_RUN.toString(), flow1.getId()); + cluster.addChild(TimelineEntityType.YARN_FLOW_RUN.toString(), + flow1.getId()); flow1 .setParent(TimelineEntityType.YARN_CLUSTER.toString(), cluster.getId()); flow1.addChild(TimelineEntityType.YARN_FLOW_RUN.toString(), flow2.getId()); flow2.setParent(TimelineEntityType.YARN_FLOW_RUN.toString(), flow1.getId()); - flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(), app1.getId()); - flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(), app2.getId()); + flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(), + app1.getId()); + flow2.addChild(TimelineEntityType.YARN_APPLICATION.toString(), + app2.getId()); app1.setParent(TimelineEntityType.YARN_FLOW_RUN.toString(), flow2.getId()); app1.addChild(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(), appAttempt.getId()); appAttempt - .setParent(TimelineEntityType.YARN_APPLICATION.toString(), app1.getId()); + .setParent(TimelineEntityType.YARN_APPLICATION.toString(), + app1.getId()); app2.setParent(TimelineEntityType.YARN_FLOW_RUN.toString(), flow2.getId()); appAttempt.addChild(TimelineEntityType.YARN_CONTAINER.toString(), container.getId()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java index 71dafdc846..5813340da2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClientV2Impl.java @@ -43,7 +43,7 @@ public class TestTimelineClientV2Impl { private static final Log LOG = LogFactory.getLog(TestTimelineClientV2Impl.class); private TestV2TimelineClient client; - private static long TIME_TO_SLEEP = 150; + private static final long TIME_TO_SLEEP = 150L; private static final String EXCEPTION_MSG = "Exception in the content"; 
@Before @@ -62,12 +62,12 @@ public void setup() { public TestName currTestName = new TestName(); private YarnConfiguration conf; - private TestV2TimelineClient createTimelineClient(YarnConfiguration conf) { + private TestV2TimelineClient createTimelineClient(YarnConfiguration config) { ApplicationId id = ApplicationId.newInstance(0, 0); - TestV2TimelineClient client = new TestV2TimelineClient(id); - client.init(conf); - client.start(); - return client; + TestV2TimelineClient tc = new TestV2TimelineClient(id); + tc.init(config); + tc.start(); + return tc; } private class TestV2TimelineClientForExceptionHandling @@ -76,12 +76,16 @@ public TestV2TimelineClientForExceptionHandling(ApplicationId id) { super(id); } - protected boolean throwYarnException; + private boolean throwYarnException; public void setThrowYarnException(boolean throwYarnException) { this.throwYarnException = throwYarnException; } + public boolean isThrowYarnException() { + return throwYarnException; + } + @Override protected void putObjects(URI base, String path, MultivaluedMap params, Object obj) @@ -123,7 +127,7 @@ public TestV2TimelineClient(ApplicationId id) { protected void putObjects(String path, MultivaluedMap params, Object obj) throws IOException, YarnException { - if (throwYarnException) { + if (isThrowYarnException()) { throw new YarnException("ActualException"); } publishedEntities.add((TimelineEntities) obj); @@ -139,17 +143,17 @@ protected void putObjects(String path, @Test public void testExceptionMultipleRetry() { - TestV2TimelineClientForExceptionHandling client = + TestV2TimelineClientForExceptionHandling c = new TestV2TimelineClientForExceptionHandling( ApplicationId.newInstance(0, 0)); int maxRetries = 2; conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, maxRetries); - client.init(conf); - client.start(); - client.setTimelineServiceAddress("localhost:12345"); + c.init(conf); + c.start(); + c.setTimelineServiceAddress("localhost:12345"); try { - client.putEntities(new TimelineEntity()); + c.putEntities(new TimelineEntity()); } catch (IOException e) { Assert.fail("YARN exception is expected"); } catch (YarnException e) { @@ -161,9 +165,9 @@ public void testExceptionMultipleRetry() { "TimelineClient has reached to max retry times : " + maxRetries)); } - client.setThrowYarnException(true); + c.setThrowYarnException(true); try { - client.putEntities(new TimelineEntity()); + c.putEntities(new TimelineEntity()); } catch (IOException e) { Assert.fail("YARN exception is expected"); } catch (YarnException e) { @@ -173,7 +177,7 @@ public void testExceptionMultipleRetry() { Assert.assertTrue("YARN exception is expected", cause.getMessage().contains(EXCEPTION_MSG)); } - client.stop(); + c.stop(); } @Test @@ -348,7 +352,6 @@ private void printReceivedEntities() { for (int i = 0; i < client.getNumOfTimelineEntitiesPublished(); i++) { TimelineEntities publishedEntities = client.getPublishedEntities(i); StringBuilder entitiesPerPublish = new StringBuilder(); - ; for (TimelineEntity entity : publishedEntities.getEntities()) { entitiesPerPublish.append(entity.getId()); entitiesPerPublish.append(","); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java index 34b9497f80..d3d815b218 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestTimelineServiceHelper.java @@ -37,18 +37,21 @@ public void testMapCastToHashMap() { // Test empty hashmap be casted to a empty hashmap Map emptyHashMap = new HashMap(); - Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(emptyHashMap).size(), 0); + Assert.assertEquals( + TimelineServiceHelper.mapCastToHashMap(emptyHashMap).size(), 0); // Test empty non-hashmap be casted to a empty hashmap Map emptyTreeMap = new TreeMap(); - Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(emptyTreeMap).size(), 0); + Assert.assertEquals( + TimelineServiceHelper.mapCastToHashMap(emptyTreeMap).size(), 0); // Test non-empty hashmap be casted to hashmap correctly Map firstHashMap = new HashMap(); String key = "KEY"; String value = "VALUE"; firstHashMap.put(key, value); - Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(firstHashMap), firstHashMap); + Assert.assertEquals( + TimelineServiceHelper.mapCastToHashMap(firstHashMap), firstHashMap); // Test non-empty non-hashmap is casted correctly. Map firstTreeMap = new TreeMap(); @@ -59,17 +62,21 @@ public void testMapCastToHashMap() { Assert.assertEquals(alternateHashMap.get(key), value); // Test complicated hashmap be casted correctly - Map> complicatedHashMap = new HashMap>(); + Map> complicatedHashMap = + new HashMap>(); Set hashSet = new HashSet(); hashSet.add(value); complicatedHashMap.put(key, hashSet); - Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(complicatedHashMap), + Assert.assertEquals( + TimelineServiceHelper.mapCastToHashMap(complicatedHashMap), complicatedHashMap); // Test complicated non-hashmap get casted correctly - Map> complicatedTreeMap = new TreeMap>(); + Map> complicatedTreeMap = + new TreeMap>(); complicatedTreeMap.put(key, hashSet); - Assert.assertEquals(TimelineServiceHelper.mapCastToHashMap(complicatedTreeMap).get(key), + Assert.assertEquals( + TimelineServiceHelper.mapCastToHashMap(complicatedTreeMap).get(key), hashSet); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java index d23c04a51c..64eea63160 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocol.java @@ -48,9 +48,9 @@ public interface CollectorNodemanagerProtocol { * * @param request the request of registering a new collector or a list of * collectors - * @return - * @throws YarnException - * @throws IOException + * @return the response for registering the new collector + * @throws YarnException if the request is invalid + * @throws IOException if there are I/O errors */ ReportNewCollectorInfoResponse reportNewCollectorInfo( ReportNewCollectorInfoRequest request) @@ -63,9 +63,9 @@ ReportNewCollectorInfoResponse reportNewCollectorInfo( *

* @param request the request of getting the aggregator context information of * the given application - @return - @throws YarnException - @throws IOException + @return the response containing the timeline collector context of the given application + @throws YarnException if the request is invalid + @throws IOException if there are I/O errors */ GetTimelineCollectorContextResponse getTimelineCollectorContext( GetTimelineCollectorContextRequest request) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java index 655e9890a2..24f7c3dd53 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/CollectorNodemanagerProtocolPB.java @@ -25,7 +25,8 @@ @Private @Unstable @ProtocolInfo( - protocolName = "org.apache.hadoop.yarn.server.api.CollectorNodemanagerProtocolPB", + protocolName = + "org.apache.hadoop.yarn.server.api.CollectorNodemanagerProtocolPB", protocolVersion = 1) public interface CollectorNodemanagerProtocolPB extends CollectorNodemanagerProtocolService.BlockingInterface { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java index b9e17f22ea..bc50ac534c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java @@ -53,7 +53,7 @@ public class CollectorNodemanagerProtocolPBClientImpl implements + "rpc.nm-command-timeout"; /** - * Maximum of 1 minute timeout for a Node to react to the command + * Maximum of 1 minute timeout for a Node to react to the command.
*/ static final int DEFAULT_COMMAND_TIMEOUT = 60000; @@ -63,7 +63,7 @@ public class CollectorNodemanagerProtocolPBClientImpl implements public CollectorNodemanagerProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, CollectorNodemanagerProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine.class); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); int expireIntvl = conf.getInt(NM_COMMAND_TIMEOUT, DEFAULT_COMMAND_TIMEOUT); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java index 21fb270285..7b93a68a29 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/CollectorNodemanagerProtocolPBServiceImpl.java @@ -41,7 +41,8 @@ public class CollectorNodemanagerProtocolPBServiceImpl implements private CollectorNodemanagerProtocol real; - public CollectorNodemanagerProtocolPBServiceImpl(CollectorNodemanagerProtocol impl) { + public CollectorNodemanagerProtocolPBServiceImpl( + CollectorNodemanagerProtocol impl) { this.real = impl; } @@ -52,7 +53,8 @@ public ReportNewCollectorInfoResponseProto reportNewCollectorInfo( ReportNewCollectorInfoRequestPBImpl request = new ReportNewCollectorInfoRequestPBImpl(proto); try { - ReportNewCollectorInfoResponse response = real.reportNewCollectorInfo(request); + ReportNewCollectorInfoResponse response = + real.reportNewCollectorInfo(request); return ((ReportNewCollectorInfoResponsePBImpl)response).getProto(); } catch (YarnException e) { throw new ServiceException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java index b53b55b14f..7014388c80 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextRequestPBImpl.java @@ -29,10 +29,10 @@ public class GetTimelineCollectorContextRequestPBImpl extends GetTimelineCollectorContextRequest { - GetTimelineCollectorContextRequestProto + private GetTimelineCollectorContextRequestProto proto = GetTimelineCollectorContextRequestProto.getDefaultInstance(); - GetTimelineCollectorContextRequestProto.Builder builder = null; - boolean viaProto = false; + private GetTimelineCollectorContextRequestProto.Builder builder = null; + private boolean viaProto = false; private ApplicationId appId = null; @@ -60,8 +60,9 @@ public int 
hashCode() { @Override public boolean equals(Object other) { - if (other == null) + if (other == null) { return false; + } if (other.getClass().isAssignableFrom(this.getClass())) { return this.getProto().equals(this.getClass().cast(other).getProto()); } @@ -80,8 +81,9 @@ private void mergeLocalToBuilder() { } private void mergeLocalToProto() { - if (viaProto) + if (viaProto) { maybeInitBuilder(); + } mergeLocalToBuilder(); proto = builder.build(); viaProto = true; @@ -100,7 +102,8 @@ public ApplicationId getApplicationId() { return this.appId; } - GetTimelineCollectorContextRequestProtoOrBuilder p = viaProto ? proto : builder; + GetTimelineCollectorContextRequestProtoOrBuilder p = + viaProto ? proto : builder; if (!p.hasAppId()) { return null; } @@ -110,14 +113,16 @@ public ApplicationId getApplicationId() { } @Override - public void setApplicationId(ApplicationId appId) { + public void setApplicationId(ApplicationId id) { maybeInitBuilder(); - if (appId == null) + if (id == null) { builder.clearAppId(); - this.appId = appId; + } + this.appId = id; } - private ApplicationIdPBImpl convertFromProtoFormat(YarnProtos.ApplicationIdProto p) { + private ApplicationIdPBImpl convertFromProtoFormat( + YarnProtos.ApplicationIdProto p) { return new ApplicationIdPBImpl(p); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java index 34713cb773..151b0363ec 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetTimelineCollectorContextResponsePBImpl.java @@ -26,10 +26,10 @@ public class GetTimelineCollectorContextResponsePBImpl extends GetTimelineCollectorContextResponse { - GetTimelineCollectorContextResponseProto proto = + private GetTimelineCollectorContextResponseProto proto = GetTimelineCollectorContextResponseProto.getDefaultInstance(); - GetTimelineCollectorContextResponseProto.Builder builder = null; - boolean viaProto = false; + private GetTimelineCollectorContextResponseProto.Builder builder = null; + private boolean viaProto = false; public GetTimelineCollectorContextResponsePBImpl() { builder = GetTimelineCollectorContextResponseProto.newBuilder(); @@ -55,8 +55,9 @@ public int hashCode() { @Override public boolean equals(Object other) { - if (other == null) + if (other == null) { return false; + } if (other.getClass().isAssignableFrom(this.getClass())) { return this.getProto().equals(this.getClass().cast(other).getProto()); } @@ -69,8 +70,9 @@ public String toString() { } private void mergeLocalToProto() { - if (viaProto) + if (viaProto) { maybeInitBuilder(); + } proto = builder.build(); viaProto = true; } @@ -84,7 +86,8 @@ private void maybeInitBuilder() { @Override public String getUserId() { - GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder; + GetTimelineCollectorContextResponseProtoOrBuilder p = + viaProto ? 
proto : builder; if (!p.hasUserId()) { return null; } @@ -103,7 +106,8 @@ public void setUserId(String userId) { @Override public String getFlowName() { - GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder; + GetTimelineCollectorContextResponseProtoOrBuilder p = + viaProto ? proto : builder; if (!p.hasFlowName()) { return null; } @@ -122,7 +126,8 @@ public void setFlowName(String flowName) { @Override public String getFlowVersion() { - GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder; + GetTimelineCollectorContextResponseProtoOrBuilder p = + viaProto ? proto : builder; if (!p.hasFlowVersion()) { return null; } @@ -141,7 +146,8 @@ public void setFlowVersion(String flowVersion) { @Override public long getFlowRunId() { - GetTimelineCollectorContextResponseProtoOrBuilder p = viaProto ? proto : builder; + GetTimelineCollectorContextResponseProtoOrBuilder p = + viaProto ? proto : builder; return p.getFlowRunId(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java index 912a7a3c45..d0c11985db 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java @@ -58,7 +58,7 @@ public class NodeHeartbeatRequestPBImpl extends NodeHeartbeatRequest { private Set labels = null; private List logAggregationReportsForApps = null; - Map registeredCollectors = null; + private Map registeredCollectors = null; public NodeHeartbeatRequestPBImpl() { builder = NodeHeartbeatRequestProto.newBuilder(); @@ -161,10 +161,11 @@ private LogAggregationReportProto convertToProtoFormat( private void addRegisteredCollectorsToProto() { maybeInitBuilder(); builder.clearRegisteredCollectors(); - for (Map.Entry entry : registeredCollectors.entrySet()) { + for (Map.Entry entry : + registeredCollectors.entrySet()) { builder.addRegisteredCollectors(AppCollectorsMapProto.newBuilder() - .setAppId(convertToProtoFormat(entry.getKey())) - .setAppCollectorAddr(entry.getValue())); + .setAppId(convertToProtoFormat(entry.getKey())) + .setAppCollectorAddr(entry.getValue())); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java index 788ef588d6..cd85241c51 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java @@ -69,7 +69,7 @@ public class NodeHeartbeatResponsePBImpl extends private List 
applicationsToCleanup = null; private Map systemCredentials = null; private Resource resource = null; - Map appCollectorsMap = null; + private Map appCollectorsMap = null; private MasterKey containerTokenMasterKey = null; private MasterKey nmTokenMasterKey = null; @@ -148,8 +148,8 @@ private void addAppCollectorsMapToProto() { builder.clearAppCollectorsMap(); for (Map.Entry entry : appCollectorsMap.entrySet()) { builder.addAppCollectorsMap(AppCollectorsMapProto.newBuilder() - .setAppId(convertToProtoFormat(entry.getKey())) - .setAppCollectorAddr(entry.getValue())); + .setAppId(convertToProtoFormat(entry.getKey())) + .setAppCollectorAddr(entry.getValue())); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoRequestPBImpl.java index 5dd8f17fed..c6f66194e5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoRequestPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoRequestPBImpl.java @@ -30,11 +30,11 @@ public class ReportNewCollectorInfoRequestPBImpl extends ReportNewCollectorInfoRequest { - ReportNewCollectorInfoRequestProto proto = + private ReportNewCollectorInfoRequestProto proto = ReportNewCollectorInfoRequestProto.getDefaultInstance(); - ReportNewCollectorInfoRequestProto.Builder builder = null; - boolean viaProto = false; + private ReportNewCollectorInfoRequestProto.Builder builder = null; + private boolean viaProto = false; private List collectorsList = null; @@ -62,8 +62,9 @@ public int hashCode() { @Override public boolean equals(Object other) { - if (other == null) + if (other == null) { return false; + } if (other.getClass().isAssignableFrom(this.getClass())) { return this.getProto().equals(this.getClass().cast(other).getProto()); } @@ -71,8 +72,9 @@ public boolean equals(Object other) { } private void mergeLocalToProto() { - if (viaProto) + if (viaProto) { maybeInitBuilder(); + } mergeLocalToBuilder(); proto = builder.build(); viaProto = true; @@ -104,10 +106,10 @@ private void addLocalCollectorsToProto() { private void initLocalCollectorsList() { ReportNewCollectorInfoRequestProtoOrBuilder p = viaProto ? 
proto : builder; - List collectorsList = + List list = p.getAppCollectorsList(); this.collectorsList = new ArrayList(); - for (AppCollectorsMapProto m : collectorsList) { + for (AppCollectorsMapProto m : list) { this.collectorsList.add(convertFromProtoFormat(m)); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoResponsePBImpl.java index 7c90675b6a..5f2a10aace 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoResponsePBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReportNewCollectorInfoResponsePBImpl.java @@ -29,18 +29,19 @@ public class ReportNewCollectorInfoResponsePBImpl extends ReportNewCollectorInfoResponse { - ReportNewCollectorInfoResponseProto proto = + private ReportNewCollectorInfoResponseProto proto = ReportNewCollectorInfoResponseProto.getDefaultInstance(); - ReportNewCollectorInfoResponseProto.Builder builder = null; + private ReportNewCollectorInfoResponseProto.Builder builder = null; - boolean viaProto = false; + private boolean viaProto = false; public ReportNewCollectorInfoResponsePBImpl() { builder = ReportNewCollectorInfoResponseProto.newBuilder(); } - public ReportNewCollectorInfoResponsePBImpl(ReportNewCollectorInfoResponseProto proto) { + public ReportNewCollectorInfoResponsePBImpl( + ReportNewCollectorInfoResponseProto proto) { this.proto = proto; viaProto = true; } @@ -58,8 +59,9 @@ public int hashCode() { @Override public boolean equals(Object other) { - if (other == null) + if (other == null) { return false; + } if (other.getClass().isAssignableFrom(this.getClass())) { return this.getProto().equals(this.getClass().cast(other).getProto()); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/AppCollectorsMapPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/AppCollectorsMapPBImpl.java index eb3bde4df6..3740035d83 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/AppCollectorsMapPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/AppCollectorsMapPBImpl.java @@ -33,11 +33,11 @@ @Unstable public class AppCollectorsMapPBImpl extends AppCollectorsMap { - AppCollectorsMapProto proto = + private AppCollectorsMapProto proto = AppCollectorsMapProto.getDefaultInstance(); - AppCollectorsMapProto.Builder builder = null; - boolean viaProto = false; + private AppCollectorsMapProto.Builder builder = null; + private boolean viaProto = false; private ApplicationId appId = null; private String collectorAddr = null; @@ -52,7 +52,7 @@ public AppCollectorsMapPBImpl(AppCollectorsMapProto proto) { } public AppCollectorsMapProto getProto() { - mergeLocalToProto(); + mergeLocalToProto(); proto = viaProto ? 
proto : builder.build(); viaProto = true; return proto; @@ -65,8 +65,9 @@ public int hashCode() { @Override public boolean equals(Object other) { - if (other == null) + if (other == null) { return false; + } if (other.getClass().isAssignableFrom(this.getClass())) { return this.getProto().equals(this.getClass().cast(other).getProto()); } @@ -98,12 +99,12 @@ public String getCollectorAddr() { } @Override - public void setApplicationId(ApplicationId appId) { + public void setApplicationId(ApplicationId id) { maybeInitBuilder(); - if (appId == null) { + if (id == null) { builder.clearAppId(); } - this.appId = appId; + this.appId = id; } @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java index da8e22473f..e25f5281fb 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java @@ -77,7 +77,8 @@ public class TestRPC { private static final String EXCEPTION_MSG = "test error"; private static final String EXCEPTION_CAUSE = "exception cause"; - private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); + private static final RecordFactory RECORD_FACTORY = + RecordFactoryProvider.getRecordFactory(null); public static final String ILLEGAL_NUMBER_MESSAGE = "collectors' number in ReportNewCollectorInfoRequest is not ONE."; @@ -101,7 +102,8 @@ public void testUnknownCall() { // Any unrelated protocol would do ApplicationClientProtocol proxy = (ApplicationClientProtocol) rpc.getProxy( - ApplicationClientProtocol.class, NetUtils.getConnectAddress(server), conf); + ApplicationClientProtocol.class, NetUtils.getConnectAddress(server), + conf); try { proxy.getNewApplication(Records @@ -111,7 +113,8 @@ public void testUnknownCall() { Assert.assertTrue(e.getMessage().matches( "Unknown method getNewApplication called on.*" + "org.apache.hadoop.yarn.proto.ApplicationClientProtocol" - + "\\$ApplicationClientProtocolService\\$BlockingInterface protocol.")); + + "\\$ApplicationClientProtocolService\\$BlockingInterface " + + "protocol.")); } catch (Exception e) { e.printStackTrace(); } finally { @@ -132,8 +135,10 @@ public void testRPCOnCollectorNodeManagerProtocol() throws IOException { server.start(); // Test unrelated protocol wouldn't get response - ApplicationClientProtocol unknownProxy = (ApplicationClientProtocol) rpc.getProxy( - ApplicationClientProtocol.class, NetUtils.getConnectAddress(server), conf); + ApplicationClientProtocol unknownProxy = + (ApplicationClientProtocol) rpc.getProxy( + ApplicationClientProtocol.class, NetUtils.getConnectAddress(server), + conf); try { unknownProxy.getNewApplication(Records @@ -143,14 +148,17 @@ public void testRPCOnCollectorNodeManagerProtocol() throws IOException { Assert.assertTrue(e.getMessage().matches( "Unknown method getNewApplication called on.*" + "org.apache.hadoop.yarn.proto.ApplicationClientProtocol" - + "\\$ApplicationClientProtocolService\\$BlockingInterface protocol.")); + + "\\$ApplicationClientProtocolService\\$BlockingInterface " + + "protocol.")); } catch (Exception e) { e.printStackTrace(); } // Test CollectorNodemanagerProtocol get proper response - CollectorNodemanagerProtocol proxy 
= (CollectorNodemanagerProtocol)rpc.getProxy( - CollectorNodemanagerProtocol.class, NetUtils.getConnectAddress(server), conf); + CollectorNodemanagerProtocol proxy = + (CollectorNodemanagerProtocol)rpc.getProxy( + CollectorNodemanagerProtocol.class, NetUtils.getConnectAddress(server), + conf); // Verify request with DEFAULT_APP_ID and DEFAULT_COLLECTOR_ADDR get // normally response. try { @@ -196,7 +204,8 @@ public void testRPCOnCollectorNodeManagerProtocol() throws IOException { Assert.fail("RPC call failured is expected here."); } catch (YarnException | IOException e) { Assert.assertTrue(e instanceof YarnException); - Assert.assertTrue(e.getMessage().contains("The application is not found.")); + Assert.assertTrue(e.getMessage().contains( + "The application is not found.")); } server.stop(); } @@ -215,12 +224,13 @@ private void test(String rpcClass) throws Exception { Server server = rpc.getServer(ContainerManagementProtocol.class, new DummyContainerManager(), addr, conf, null, 1); server.start(); - RPC.setProtocolEngine(conf, ContainerManagementProtocolPB.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, ContainerManagementProtocolPB.class, + ProtobufRpcEngine.class); ContainerManagementProtocol proxy = (ContainerManagementProtocol) rpc.getProxy(ContainerManagementProtocol.class, NetUtils.getConnectAddress(server), conf); ContainerLaunchContext containerLaunchContext = - recordFactory.newRecordInstance(ContainerLaunchContext.class); + RECORD_FACTORY.newRecordInstance(ContainerLaunchContext.class); ApplicationId applicationId = ApplicationId.newInstance(0, 0); ApplicationAttemptId applicationAttemptId = @@ -257,10 +267,10 @@ private void test(String rpcClass) throws Exception { boolean exception = false; try { StopContainersRequest stopRequest = - recordFactory.newRecordInstance(StopContainersRequest.class); + RECORD_FACTORY.newRecordInstance(StopContainersRequest.class); stopRequest.setContainerIds(containerIds); proxy.stopContainers(stopRequest); - } catch (YarnException e) { + } catch (YarnException e) { exception = true; Assert.assertTrue(e.getMessage().contains(EXCEPTION_MSG)); Assert.assertTrue(e.getMessage().contains(EXCEPTION_CAUSE)); @@ -284,7 +294,7 @@ public GetContainerStatusesResponse getContainerStatuses( GetContainerStatusesRequest request) throws YarnException { GetContainerStatusesResponse response = - recordFactory.newRecordInstance(GetContainerStatusesResponse.class); + RECORD_FACTORY.newRecordInstance(GetContainerStatusesResponse.class); response.setContainerStatuses(statuses); return response; } @@ -293,8 +303,9 @@ public GetContainerStatusesResponse getContainerStatuses( public StartContainersResponse startContainers( StartContainersRequest requests) throws YarnException { StartContainersResponse response = - recordFactory.newRecordInstance(StartContainersResponse.class); - for (StartContainerRequest request : requests.getStartContainerRequests()) { + RECORD_FACTORY.newRecordInstance(StartContainersResponse.class); + for (StartContainerRequest request : + requests.getStartContainerRequests()) { Token containerToken = request.getContainerToken(); ContainerTokenIdentifier tokenId = null; @@ -304,7 +315,7 @@ public StartContainersResponse startContainers( throw RPCUtil.getRemoteException(e); } ContainerStatus status = - recordFactory.newRecordInstance(ContainerStatus.class); + RECORD_FACTORY.newRecordInstance(ContainerStatus.class); status.setState(ContainerState.RUNNING); status.setContainerId(tokenId.getContainerID()); status.setExitStatus(0); @@ -324,7 
+335,8 @@ public StopContainersResponse stopContainers(StopContainersRequest request) @Override public IncreaseContainersResourceResponse increaseContainersResource( - IncreaseContainersResourceRequest request) throws YarnException, IOException { + IncreaseContainersResourceRequest request) + throws YarnException, IOException { return null; } @@ -383,7 +395,8 @@ public ReportNewCollectorInfoResponse reportNewCollectorInfo( } ReportNewCollectorInfoResponse response = - recordFactory.newRecordInstance(ReportNewCollectorInfoResponse.class); + RECORD_FACTORY.newRecordInstance( + ReportNewCollectorInfoResponse.class); return response; } @@ -392,8 +405,8 @@ public GetTimelineCollectorContextResponse getTimelineCollectorContext( GetTimelineCollectorContextRequest request) throws YarnException, IOException { if (request.getApplicationId().getId() == 1) { - return GetTimelineCollectorContextResponse.newInstance( - "test_user_id", "test_flow_name", "test_flow_version", 12345678L); + return GetTimelineCollectorContextResponse.newInstance( + "test_user_id", "test_flow_name", "test_flow_version", 12345678L); } else { throw new YarnException("The application is not found."); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java index 0c84be0e59..5bfbb8df1a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java @@ -187,8 +187,8 @@ protected ContainerManagerImpl createContainerManager(Context context, } } - protected NMCollectorService createNMCollectorService(Context context) { - return new NMCollectorService(context); + protected NMCollectorService createNMCollectorService(Context ctxt) { + return new NMCollectorService(ctxt); } protected WebServer createWebServer(Context nmContext, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java index b37b38e4f7..f692bf1723 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java @@ -816,7 +816,8 @@ public void run() { NodeStatusUpdaterImpl.this.context .getNMTokenSecretManager().getCurrentKey(), nodeLabelsForHeartbeat, - NodeStatusUpdaterImpl.this.context.getRegisteredCollectors()); + NodeStatusUpdaterImpl.this.context + .getRegisteredCollectors()); if (logAggregationEnabled) { // pull log aggregation status for application running in this NM @@ -939,23 +940,6 @@ public void run() { } } - /** - * Caller should take care of sending non null nodelabels for both - * arguments - * - * @param nodeLabelsNew - * @param nodeLabelsOld - * @return if the New node 
labels are diff from the older one. - */ - private boolean areNodeLabelsUpdated(Set nodeLabelsNew, - Set nodeLabelsOld) { - if (nodeLabelsNew.size() != nodeLabelsOld.size() - || !nodeLabelsOld.containsAll(nodeLabelsNew)) { - return true; - } - return false; - } - private void updateTimelineClientsAddress( NodeHeartbeatResponse response) { Map knownCollectorsMap = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java index 89e81ca6ed..cb63ae311d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java @@ -230,8 +230,7 @@ public ContainerManagerImpl(Context context, ContainerExecutor exec, nmMetricsPublisher = createNMTimelinePublisher(context); context.setNMTimelinePublisher(nmMetricsPublisher); } - this.containersMonitor = - new ContainersMonitorImpl(exec, dispatcher, this.context); + this.containersMonitor = createContainersMonitor(exec); addService(this.containersMonitor); dispatcher.register(ContainerEventType.class, @@ -447,8 +446,9 @@ protected SharedCacheUploadService createSharedCacheUploaderService() { } @VisibleForTesting - protected NMTimelinePublisher createNMTimelinePublisher(Context context) { - NMTimelinePublisher nmTimelinePublisherLocal = new NMTimelinePublisher(context); + protected NMTimelinePublisher createNMTimelinePublisher(Context ctxt) { + NMTimelinePublisher nmTimelinePublisherLocal = + new NMTimelinePublisher(ctxt); addIfService(nmTimelinePublisherLocal); return nmTimelinePublisherLocal; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationContainerFinishedEvent.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationContainerFinishedEvent.java index 9cd34ccf58..0a8ffdff65 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationContainerFinishedEvent.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationContainerFinishedEvent.java @@ -25,7 +25,8 @@ public class ApplicationContainerFinishedEvent extends ApplicationEvent { private ContainerStatus containerStatus; public ApplicationContainerFinishedEvent(ContainerStatus containerStatus) { - super(containerStatus.getContainerId().getApplicationAttemptId().getApplicationId(), + super(containerStatus.getContainerId().getApplicationAttemptId(). 
+ getApplicationId(), ApplicationEventType.APPLICATION_CONTAINER_FINISHED); this.containerStatus = containerStatus; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java index 00bd56bd0f..193dfead37 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java @@ -74,7 +74,6 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerMetrics; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStartMonitoringEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStopMonitoringEvent; -import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService; import org.apache.hadoop.yarn.server.nodemanager.recovery.NMStateStoreService.RecoveredContainerState; @@ -87,7 +86,6 @@ import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachineFactory; import org.apache.hadoop.yarn.util.Clock; -import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.SystemClock; import org.apache.hadoop.yarn.util.resource.Resources; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java index 4167ece602..e6a66bd9fe 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java @@ -435,8 +435,9 @@ public void run() { + " for the first time"); ResourceCalculatorProcessTree pt = - ResourceCalculatorProcessTree.getResourceCalculatorProcessTree( - pId, processTreeClass, conf); + ResourceCalculatorProcessTree. 
+ getResourceCalculatorProcessTree( + pId, processTreeClass, conf); ptInfo.setPid(pId); ptInfo.setProcessTree(pt); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java index 39a61813ac..c4d45a90da 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java @@ -33,12 +33,12 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.timelineservice.ContainerEntity; -import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetricOperation; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.Identifier; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric; +import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetricOperation; import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.Dispatcher; @@ -56,6 +56,8 @@ import org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree; import org.apache.hadoop.yarn.util.timeline.TimelineUtils; +import com.google.common.annotations.VisibleForTesting; + /** * Metrics publisher service that publishes data to the timeline service v.2. 
It * is used only if the timeline service v.2 is enabled and the system publishing @@ -73,7 +75,7 @@ public class NMTimelinePublisher extends CompositeService { private String httpAddress; - protected final Map appToClientMap; + private final Map appToClientMap; public NMTimelinePublisher(Context context) { super(NMTimelinePublisher.class.getName()); @@ -99,6 +101,11 @@ protected void serviceStart() throws Exception { this.httpAddress = nodeId.getHost() + ":" + context.getHttpPort(); } + @VisibleForTesting + Map getAppToClientMap() { + return appToClientMap; + } + protected void handleNMTimelineEvent(NMTimelineEvent event) { switch (event.getType()) { case TIMELINE_ENTITY_PUBLISH: diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java index 8cec5efd09..e7d18b7ecd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java @@ -85,7 +85,6 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; -import org.apache.hadoop.yarn.server.api.ContainerContext; import org.apache.hadoop.yarn.server.api.ResourceTracker; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse; @@ -1707,9 +1706,9 @@ protected NMContext createNMContext( NMContainerTokenSecretManager containerTokenSecretManager, NMTokenSecretManagerInNM nmTokenSecretManager, NMStateStoreService store, boolean isDistributedSchedulingEnabled, - Configuration conf) { + Configuration config) { return new MyNMContext(containerTokenSecretManager, - nmTokenSecretManager, conf); + nmTokenSecretManager, config); } }; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java index 92744b2480..05ea03641e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java @@ -645,8 +645,9 @@ private Container createMockedContainer(ApplicationId appId, int containerId) { when(c.getLaunchContext()).thenReturn(launchContext); when(launchContext.getApplicationACLs()).thenReturn( new HashMap()); - when(c.cloneAndGetContainerStatus()).thenReturn(BuilderUtils.newContainerStatus(cId, - ContainerState.NEW, "", 0, Resource.newInstance(1024, 1))); + 
when(c.cloneAndGetContainerStatus()).thenReturn( + BuilderUtils.newContainerStatus(cId, + ContainerState.NEW, "", 0, Resource.newInstance(1024, 1))); return c; } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java index 7a4fca3661..cc9e662f9e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java @@ -99,7 +99,6 @@ import org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.apache.hadoop.yarn.util.Apps; import org.apache.hadoop.yarn.util.AuxiliaryServiceHelper; -import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin; import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin; import org.hamcrest.CoreMatchers; @@ -111,16 +110,17 @@ public class TestContainerLaunch extends BaseContainerManagerTest { private static final String INVALID_JAVA_HOME = "/no/jvm/here"; - protected Context distContext = new NMContext(new NMContainerTokenSecretManager( - conf), new NMTokenSecretManagerInNM(), null, - new ApplicationACLsManager(conf), new NMNullStateStoreService(), false, - conf) { - public int getHttpPort() { - return HTTP_PORT; - }; - public NodeId getNodeId() { - return NodeId.newInstance("ahost", 1234); - }; + private Context distContext = + new NMContext(new NMContainerTokenSecretManager(conf), + new NMTokenSecretManagerInNM(), null, + new ApplicationACLsManager(conf), new NMNullStateStoreService(), + false, conf) { + public int getHttpPort() { + return HTTP_PORT; + }; + public NodeId getNodeId() { + return NodeId.newInstance("ahost", 1234); + }; }; public TestContainerLaunch() throws UnsupportedFileSystemException { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/TestNMTimelinePublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/TestNMTimelinePublisher.java index 4aa28d2dbe..ae9397a78e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/TestNMTimelinePublisher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/TestNMTimelinePublisher.java @@ -55,8 +55,8 @@ public void testContainerResourceUsage() { when(context.getHttpPort()).thenReturn(0); NMTimelinePublisher publisher = new NMTimelinePublisher(context) { public void createTimelineClient(ApplicationId appId) { - if (!appToClientMap.containsKey(appId)) { - appToClientMap.put(appId, timelineClient); + if (!getAppToClientMap().containsKey(appId)) { + getAppToClientMap().put(appId, timelineClient); } } }; diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java index c98304001a..8feca21ccc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java @@ -24,7 +24,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; @@ -40,10 +39,9 @@ public class MockApp implements Application { Map containers = new HashMap(); ApplicationState appState; Application app; - String flowName; - String flowVersion; - long flowRunId; - TimelineClient timelineClient = null; + private String flowName; + private String flowVersion; + private long flowRunId; public MockApp(int uniqId) { this("mockUser", 1234, uniqId); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java index e1e3298fda..1e702de934 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java @@ -373,7 +373,8 @@ public RMApplicationHistoryWriter getRMApplicationHistoryWriter() { @Override public void setRMTimelineCollectorManager( RMTimelineCollectorManager timelineCollectorManager) { - activeServiceContext.setRMTimelineCollectorManager(timelineCollectorManager); + activeServiceContext.setRMTimelineCollectorManager( + timelineCollectorManager); } @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java index 61e7d650ca..7f4ed33f58 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java @@ -311,7 +311,7 @@ public void containerFinished(RMContainer container, long finishedTime) { 
entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO, container.getAllocatedNode().getHost()); entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO, - container.getAllocatedNode().getPort()); + container.getAllocatedNode().getPort()); entity.setOtherInfo(entityInfo); tEvent.setEventInfo(eventInfo); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java index 0da395a291..3ea4714976 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java @@ -76,9 +76,9 @@ public class TestSystemMetricsPublisherForV2 { /** - * is the folder where the FileSystemTimelineWriterImpl writes the entities + * The folder where the FileSystemTimelineWriterImpl writes the entities. */ - protected static File testRootDir = new File("target", + private static File testRootDir = new File("target", TestSystemMetricsPublisherForV2.class.getName() + "-localDir") .getAbsoluteFile(); @@ -151,7 +151,8 @@ private static Configuration getTimelineV2Conf() { } catch (IOException e) { e.printStackTrace(); Assert - .fail("Exception while setting the TIMELINE_SERVICE_STORAGE_DIR_ROOT "); + .fail("Exception while setting the " + + "TIMELINE_SERVICE_STORAGE_DIR_ROOT "); } return conf; } @@ -159,30 +160,30 @@ private static Configuration getTimelineV2Conf() { @Test public void testSystemMetricPublisherInitialization() { @SuppressWarnings("resource") - TimelineServiceV2Publisher metricsPublisher = + TimelineServiceV2Publisher publisher = new TimelineServiceV2Publisher(mock(RMContext.class)); try { Configuration conf = getTimelineV2Conf(); conf.setBoolean(YarnConfiguration.RM_PUBLISH_CONTAINER_EVENTS_ENABLED, YarnConfiguration.DEFAULT_RM_PUBLISH_CONTAINER_EVENTS_ENABLED); - metricsPublisher.init(conf); + publisher.init(conf); assertFalse( "Default configuration should not publish container events from RM", - metricsPublisher.isPublishContainerEvents()); + publisher.isPublishContainerEvents()); - metricsPublisher.stop(); + publisher.stop(); - metricsPublisher = new TimelineServiceV2Publisher(mock(RMContext.class)); + publisher = new TimelineServiceV2Publisher(mock(RMContext.class)); conf = getTimelineV2Conf(); - metricsPublisher.init(conf); + publisher.init(conf); assertTrue("Expected to have registered event handlers and set ready to " + "publish events after init", - metricsPublisher.isPublishContainerEvents()); - metricsPublisher.start(); + publisher.isPublishContainerEvents()); + publisher.start(); assertTrue("Expected to publish container events from RM", - metricsPublisher.isPublishContainerEvents()); + publisher.isPublishContainerEvents()); } finally { - metricsPublisher.stop(); + publisher.stop(); } } @@ -243,7 +244,7 @@ public void testPublishAppAttemptMetrics() throws Exception { + FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION; File appFile = new File(outputDirApp, timelineServiceFileName); 
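The assertions around this point read back files produced by FileSystemTimelineWriterImpl, which appends each published entity as a line of JSON under its configured storage root. As context, a file-backed check of that kind amounts to scanning the entity file, as in the minimal sketch below; EntityFileCheck and countEntitiesWithEvent are hypothetical names for illustration, not the test's actual helpers.

```java
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

// Hypothetical illustration of a verifyEntity-style check against a
// FileSystemTimelineWriterImpl output file (one JSON entity per line).
public final class EntityFileCheck {

  private EntityFileCheck() {
  }

  public static long countEntitiesWithEvent(File entityFile,
      String eventType) throws IOException {
    long count = 0;
    try (BufferedReader reader =
        new BufferedReader(new FileReader(entityFile))) {
      String line;
      while ((line = reader.readLine()) != null) {
        // Each line is one serialized entity; count the ones that
        // mention the expected event type.
        if (line.contains(eventType)) {
          count++;
        }
      }
    }
    return count;
  }
}
```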
Assert.assertTrue(appFile.exists()); - verifyEntity(appFile,2, AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE); + verifyEntity(appFile, 2, AppAttemptMetricsConstants.REGISTERED_EVENT_TYPE); } @Test(timeout = 10000) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java index 3a5c797af0..5a63547e2f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java @@ -134,7 +134,8 @@ public void testPutExtendedEntities() throws Exception { ApplicationEntity app = new ApplicationEntity(); app.setId(appId.toString()); flow.addChild(app.getType(), app.getId()); - ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1); + ApplicationAttemptId attemptId = + ApplicationAttemptId.newInstance(appId, 1); ApplicationAttemptEntity appAttempt = new ApplicationAttemptEntity(); appAttempt.setId(attemptId.toString()); ContainerId containerId = ContainerId.newContainerId(attemptId, 1); @@ -144,8 +145,10 @@ public void testPutExtendedEntities() throws Exception { user.setId(UserGroupInformation.getCurrentUser().getShortUserName()); QueueEntity queue = new QueueEntity(); queue.setId("default_queue"); - client.putEntities(cluster, flow, app, appAttempt, container, user, queue); - client.putEntitiesAsync(cluster, flow, app, appAttempt, container, user, queue); + client.putEntities(cluster, flow, app, appAttempt, container, user, + queue); + client.putEntitiesAsync(cluster, flow, app, appAttempt, container, user, + queue); } finally { client.stop(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java index 9af920553c..b1854488f8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java @@ -256,17 +256,17 @@ private static void loadData() throws Exception { entity5.addEvent(event54); Map<String, Set<String>> isRelatedTo1 = new HashMap<String, Set<String>>(); isRelatedTo1.put("type2", - Sets.newHashSet("entity21","entity22","entity23","entity24")); - isRelatedTo1.put("type4", Sets.newHashSet("entity41","entity42")); - isRelatedTo1.put("type1", Sets.newHashSet("entity14","entity15")); + Sets.newHashSet("entity21", "entity22", "entity23", "entity24")); + isRelatedTo1.put("type4", Sets.newHashSet("entity41", "entity42")); + isRelatedTo1.put("type1",
Sets.newHashSet("entity14", "entity15")); isRelatedTo1.put("type3", Sets.newHashSet("entity31", "entity35", "entity32", "entity33")); entity5.addIsRelatedToEntities(isRelatedTo1); Map> relatesTo1 = new HashMap>(); relatesTo1.put("type2", - Sets.newHashSet("entity21","entity22","entity23","entity24")); - relatesTo1.put("type4", Sets.newHashSet("entity41","entity42")); - relatesTo1.put("type1", Sets.newHashSet("entity14","entity15")); + Sets.newHashSet("entity21", "entity22", "entity23", "entity24")); + relatesTo1.put("type4", Sets.newHashSet("entity41", "entity42")); + relatesTo1.put("type1", Sets.newHashSet("entity14", "entity15")); relatesTo1.put("type3", Sets.newHashSet("entity31", "entity35", "entity32", "entity33")); entity5.addRelatesToEntities(relatesTo1); @@ -317,16 +317,16 @@ private static void loadData() throws Exception { entity6.addEvent(event64); Map> isRelatedTo2 = new HashMap>(); isRelatedTo2.put("type2", - Sets.newHashSet("entity21","entity22","entity23","entity24")); - isRelatedTo2.put("type5", Sets.newHashSet("entity51","entity52")); - isRelatedTo2.put("type6", Sets.newHashSet("entity61","entity66")); + Sets.newHashSet("entity21", "entity22", "entity23", "entity24")); + isRelatedTo2.put("type5", Sets.newHashSet("entity51", "entity52")); + isRelatedTo2.put("type6", Sets.newHashSet("entity61", "entity66")); isRelatedTo2.put("type3", Sets.newHashSet("entity31")); entity6.addIsRelatedToEntities(isRelatedTo2); Map> relatesTo2 = new HashMap>(); relatesTo2.put("type2", - Sets.newHashSet("entity21","entity22","entity23","entity24")); - relatesTo2.put("type5", Sets.newHashSet("entity51","entity52")); - relatesTo2.put("type6", Sets.newHashSet("entity61","entity66")); + Sets.newHashSet("entity21", "entity22", "entity23", "entity24")); + relatesTo2.put("type5", Sets.newHashSet("entity51", "entity52")); + relatesTo2.put("type6", Sets.newHashSet("entity61", "entity66")); relatesTo2.put("type3", Sets.newHashSet("entity31")); entity6.addRelatesToEntities(relatesTo2); te5.addEntity(entity6); @@ -391,10 +391,11 @@ private static ClientResponse getResponse(Client client, URI uri) client.resource(uri).accept(MediaType.APPLICATION_JSON) .type(MediaType.APPLICATION_JSON).get(ClientResponse.class); if (resp == null || - resp.getClientResponseStatus() != ClientResponse.Status.OK) { + resp.getStatusInfo().getStatusCode() != + ClientResponse.Status.OK.getStatusCode()) { String msg = ""; if (resp != null) { - msg = resp.getClientResponseStatus().toString(); + msg = String.valueOf(resp.getStatusInfo().getStatusCode()); } throw new IOException("Incorrect response from timeline reader. 
" + "Status=" + msg); @@ -406,7 +407,8 @@ private static class DummyURLConnectionFactory implements HttpURLConnectionFactory { @Override - public HttpURLConnection getHttpURLConnection(final URL url) throws IOException { + public HttpURLConnection getHttpURLConnection(final URL url) + throws IOException { try { return (HttpURLConnection)url.openConnection(); } catch (UndeclaredThrowableException e) { @@ -422,10 +424,10 @@ private static TimelineEntity newEntity(String type, String id) { } private static TimelineMetric newMetric(TimelineMetric.Type type, - String id, long ts, Number value) { + String id, long t, Number value) { TimelineMetric metric = new TimelineMetric(type); metric.setId(id); - metric.addValue(ts, value); + metric.addValue(t, value); return metric; } @@ -463,7 +465,7 @@ private static void verifyHttpResponse(Client client, URI uri, .type(MediaType.APPLICATION_JSON).get(ClientResponse.class); assertNotNull(resp); assertTrue("Response from server should have been " + status, - resp.getClientResponseStatus().equals(status)); + resp.getStatusInfo().getStatusCode() == status.getStatusCode()); System.out.println("Response is: " + resp.getEntity(String.class)); } @@ -866,7 +868,7 @@ public void testUIDQueryWithAndWithoutFlowContextInfo() throws Exception { String appUIDWithoutFlowInfo = "cluster1!application_1111111111_1111"; uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/"+ "app-uid/" + appUIDWithoutFlowInfo); - resp = getResponse(client, uri);; + resp = getResponse(client, uri); TimelineEntity appEntity2 = resp.getEntity(TimelineEntity.class); assertNotNull(appEntity2); assertEquals( @@ -893,7 +895,7 @@ public void testUIDQueryWithAndWithoutFlowContextInfo() throws Exception { String entityUIDWithFlowInfo = appUIDWithFlowInfo + "!type1!entity1"; uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/"+ "entity-uid/" + entityUIDWithFlowInfo); - resp = getResponse(client, uri);; + resp = getResponse(client, uri); TimelineEntity singleEntity1 = resp.getEntity(TimelineEntity.class); assertNotNull(singleEntity1); assertEquals("type1", singleEntity1.getType()); @@ -903,7 +905,7 @@ public void testUIDQueryWithAndWithoutFlowContextInfo() throws Exception { appUIDWithoutFlowInfo + "!type1!entity1"; uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/"+ "entity-uid/" + entityUIDWithoutFlowInfo); - resp = getResponse(client, uri);; + resp = getResponse(client, uri); TimelineEntity singleEntity2 = resp.getEntity(TimelineEntity.class); assertNotNull(singleEntity2); assertEquals("type1", singleEntity2.getType()); @@ -1054,12 +1056,12 @@ public void testGetApp() throws Exception { assertEquals("application_1111111111_2222", entity.getId()); assertEquals(1, entity.getMetrics().size()); TimelineMetric m4 = newMetric(TimelineMetric.Type.SINGLE_VALUE, - "MAP_SLOT_MILLIS", ts - 80000, 101L); + "MAP_SLOT_MILLIS", ts - 80000, 101L); for (TimelineMetric metric : entity.getMetrics()) { assertTrue(verifyMetrics(metric, m4)); } } finally { - client.destroy(); + client.destroy(); } } @@ -1425,7 +1427,8 @@ public void testGetEntitiesInfoFilters() throws Exception { uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=(info1%20eq%20cluster1%20AND%20info4%20" + - "eq%2035000)%20OR%20(info1%20eq%20cluster2%20AND%20info2%20eq%202.0)"); + "eq%2035000)%20OR%20(info1%20eq%20cluster2%20AND%20info2%20eq%202.0" + + ")"); resp = getResponse(client, uri); 
entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){}); assertNotNull(entities); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java index fd5a7f523f..a8de759bce 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java @@ -444,17 +444,17 @@ private static void loadEntities() throws IOException { te.addEntity(entity2); HBaseTimelineWriterImpl hbi = null; try { - hbi = new HBaseTimelineWriterImpl(util.getConfiguration()); - hbi.init(util.getConfiguration()); - hbi.start(); - String cluster = "cluster1"; - String user = "user1"; - String flow = "some_flow_name"; - String flowVersion = "AB7822C10F1111"; - long runid = 1002345678919L; - String appName = "application_1231111111_1111"; - hbi.write(cluster, user, flow, flowVersion, runid, appName, te); - hbi.stop(); + hbi = new HBaseTimelineWriterImpl(util.getConfiguration()); + hbi.init(util.getConfiguration()); + hbi.start(); + String cluster = "cluster1"; + String user = "user1"; + String flow = "some_flow_name"; + String flowVersion = "AB7822C10F1111"; + long runid = 1002345678919L; + String appName = "application_1231111111_1111"; + hbi.write(cluster, user, flow, flowVersion, runid, appName, te); + hbi.stop(); } finally { if (hbi != null) { hbi.stop(); @@ -531,7 +531,7 @@ public void testWriteNullApplicationToHBase() throws Exception { int count = 0; for (Result rr = resultScanner.next(); rr != null; rr = resultScanner.next()) { - count++; + count++; } // there should be no rows written // no exceptions thrown during write @@ -1173,7 +1173,7 @@ public void testEvents() throws IOException { for (TimelineEvent e : events) { assertEquals(eventId, e.getId()); assertEquals(expTs, Long.valueOf(e.getTimestamp())); - Map<String,Object> info = e.getInfo(); + Map<String, Object> info = e.getInfo(); assertEquals(1, info.size()); for (Map.Entry<String, Object> infoEntry : info.entrySet()) { assertEquals(expKey, infoEntry.getKey()); @@ -1249,7 +1249,7 @@ public void testEventsWithEmptyInfo() throws IOException { // the qualifier is a compound key // hence match individual values assertEquals(eventId, eventColumnName.getId()); - assertEquals(expTs,eventColumnName.getTimestamp()); + assertEquals(expTs, eventColumnName.getTimestamp()); // key must be empty assertNull(eventColumnName.getInfoKey()); Object value = e.getValue(); @@ -1280,7 +1280,7 @@ public void testEventsWithEmptyInfo() throws IOException { for (TimelineEvent e : events) { assertEquals(eventId, e.getId()); assertEquals(expTs, Long.valueOf(e.getTimestamp())); - Map<String,Object> info = e.getInfo(); + Map<String, Object> info = e.getInfo(); assertTrue(info == null || info.isEmpty()); } } finally { @@ -1337,7 +1337,7 @@ public void testEventsEscapeTs() throws IOException { for (TimelineEvent e : events) { assertEquals(eventId, e.getId()); assertEquals(expTs, e.getTimestamp()); - Map<String,Object> info = e.getInfo(); + Map<String, Object> info = e.getInfo(); assertEquals(1, info.size()); for (Map.Entry<String, Object> infoEntry : 
info.entrySet()) { assertEquals(expKey, infoEntry.getKey()); @@ -1417,14 +1417,14 @@ public void testNonIntegralMetricValues() throws IOException { public void testReadEntities() throws Exception { TimelineEntity entity = reader.getEntity( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", "hello"), + 1002345678919L, "application_1231111111_1111", "world", "hello"), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertNotNull(entity); assertEquals(3, entity.getConfigs().size()); assertEquals(1, entity.getIsRelatedToEntities().size()); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); assertEquals(3, entities.size()); @@ -1460,7 +1460,7 @@ public void testReadEntities() throws Exception { public void testFilterEntitiesByCreatedTime() throws Exception { Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null, null, null, null, null, null), new TimelineDataToRetrieve()); assertEquals(3, entities.size()); @@ -1468,12 +1468,12 @@ public void testFilterEntitiesByCreatedTime() throws Exception { if (!entity.getId().equals("hello") && !entity.getId().equals("hello1") && !entity.getId().equals("hello2")) { Assert.fail("Entities with ids' hello, hello1 and hello2 should be" + - " present"); + " present"); } } entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, 1425016502015L, null, null, null, null, null, null, null), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); @@ -1485,15 +1485,15 @@ public void testFilterEntitiesByCreatedTime() throws Exception { } entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, 1425016502015L, null, null, null, null, null, null), new TimelineDataToRetrieve()); - assertEquals(1, entities.size()); - for (TimelineEntity entity : entities) { - if (!entity.getId().equals("hello")) { - Assert.fail("Entity with id hello should be present"); - } - } + assertEquals(1, entities.size()); + for (TimelineEntity entity : entities) { + if (!entity.getId().equals("hello")) { + Assert.fail("Entity with id hello should be present"); + } + } } @Test @@ -1518,7 +1518,7 @@ public void testReadEntitiesRelationsAndEventFiltersDefaultView() new HashSet(Arrays.asList("relatedto5")))); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, relatesTo, isRelatedTo, null, null, null, eventFilter), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); @@ 
-1547,7 +1547,7 @@ public void testReadEntitiesEventFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "end_event")); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); @@ -1568,7 +1568,7 @@ public void testReadEntitiesEventFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "end_event")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef1), new TimelineDataToRetrieve()); @@ -1587,7 +1587,7 @@ public void testReadEntitiesEventFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "end_event")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef2), new TimelineDataToRetrieve()); @@ -1609,7 +1609,7 @@ public void testReadEntitiesEventFilters() throws Exception { TimelineCompareOp.EQUAL, "dummy_event")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef3), new TimelineDataToRetrieve()); @@ -1626,7 +1626,7 @@ public void testReadEntitiesEventFilters() throws Exception { TimelineFilterList ef4 = new TimelineFilterList(Operator.OR, list1, list2); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef4), new TimelineDataToRetrieve()); @@ -1647,7 +1647,7 @@ public void testReadEntitiesEventFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "end_event")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, null, ef5), new TimelineDataToRetrieve()); @@ -1656,8 +1656,8 @@ public void testReadEntitiesEventFilters() throws Exception { for (TimelineEntity timelineEntity : entities) { eventCnt += timelineEntity.getEvents().size(); if (!timelineEntity.getId().equals("hello")) { - Assert.fail("Entity id should have been hello"); - } + Assert.fail("Entity id should have been hello"); + } } assertEquals(0, eventCnt); } @@ -1673,7 +1673,7 @@ public void testReadEntitiesIsRelatedTo() throws Exception { new HashSet(Arrays.asList("relatedto4")))); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, 
"application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, irt, null, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); @@ -1822,7 +1822,7 @@ public void testReadEntitiesRelatesTo() throws Exception { new HashSet(Arrays.asList("relatesto4")))); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, rt, null, null, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); @@ -2000,7 +2000,7 @@ public void testReadEntitiesRelatesTo() throws Exception { public void testReadEntitiesDefaultView() throws Exception { TimelineEntity e1 = reader.getEntity( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", "hello"), + 1002345678919L, "application_1231111111_1111", "world", "hello"), new TimelineDataToRetrieve()); assertNotNull(e1); assertTrue(e1.getInfo().isEmpty() && e1.getConfigs().isEmpty() && @@ -2008,7 +2008,7 @@ public void testReadEntitiesDefaultView() throws Exception { e1.getRelatesToEntities().isEmpty()); Set es1 = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(), new TimelineDataToRetrieve()); assertEquals(3, es1.size()); @@ -2023,7 +2023,7 @@ public void testReadEntitiesDefaultView() throws Exception { public void testReadEntitiesByFields() throws Exception { TimelineEntity e1 = reader.getEntity( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", "hello"), + 1002345678919L, "application_1231111111_1111", "world", "hello"), new TimelineDataToRetrieve( null, null, EnumSet.of(Field.INFO, Field.CONFIGS), null)); assertNotNull(e1); @@ -2031,7 +2031,7 @@ public void testReadEntitiesByFields() throws Exception { assertEquals(0, e1.getIsRelatedToEntities().size()); Set es1 = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(), new TimelineDataToRetrieve( null, null, EnumSet.of(Field.IS_RELATED_TO, Field.METRICS), null)); @@ -2056,13 +2056,13 @@ public void testReadEntitiesConfigPrefix() throws Exception { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_")); TimelineEntity e1 = reader.getEntity( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", "hello"), + 1002345678919L, "application_1231111111_1111", "world", "hello"), new TimelineDataToRetrieve(list, null, null, null)); assertNotNull(e1); assertEquals(1, e1.getConfigs().size()); Set es1 = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(), new TimelineDataToRetrieve(list, null, null, null)); int cfgCnt = 0; @@ -2092,7 +2092,7 @@ public void testReadEntitiesConfigFilters() throws Exception { new TimelineFilterList(Operator.OR, list1, list2); 
Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), @@ -2106,7 +2106,7 @@ public void testReadEntitiesConfigFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); @@ -2122,7 +2122,7 @@ public void testReadEntitiesConfigFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "cfg_param1", "value1")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList1, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), @@ -2141,7 +2141,7 @@ public void testReadEntitiesConfigFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "config_param2", "value2")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList2, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), @@ -2153,7 +2153,7 @@ public void testReadEntitiesConfigFilters() throws Exception { TimelineCompareOp.EQUAL, "dummy_config", "value1")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList3, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), @@ -2165,7 +2165,7 @@ public void testReadEntitiesConfigFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "dummy_config", "value1")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList4, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), @@ -2177,7 +2177,7 @@ public void testReadEntitiesConfigFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "dummy_config", "value1", false)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList5, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), @@ -2195,7 +2195,7 @@ public void testReadEntitiesConfigFilterPrefix() 
throws Exception { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_")); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList, null, null), new TimelineDataToRetrieve(list, null, null, null)); @@ -2226,7 +2226,7 @@ public void testReadEntitiesConfigFilterPrefix() throws Exception { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "config_")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList1, null, null), new TimelineDataToRetrieve(confsToRetrieve, null, null, null)); @@ -2237,7 +2237,7 @@ public void testReadEntitiesConfigFilterPrefix() throws Exception { for (String confKey : entity.getConfigs().keySet()) { assertTrue("Config key returned should start with config_", confKey.startsWith("config_")); - } + } } assertEquals(2, cfgCnt); } @@ -2249,13 +2249,13 @@ public void testReadEntitiesMetricPrefix() throws Exception { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP1_")); TimelineEntity e1 = reader.getEntity( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", "hello"), + 1002345678919L, "application_1231111111_1111", "world", "hello"), new TimelineDataToRetrieve(null, list, null, null)); assertNotNull(e1); assertEquals(1, e1.getMetrics().size()); Set es1 = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(), new TimelineDataToRetrieve(null, list, null, null)); int metricCnt = 0; @@ -2283,7 +2283,7 @@ public void testReadEntitiesMetricFilters() throws Exception { new TimelineFilterList(Operator.OR, list1, list2); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2297,7 +2297,7 @@ public void testReadEntitiesMetricFilters() throws Exception { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); @@ -2315,7 +2315,7 @@ public void testReadEntitiesMetricFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "MAP1_BYTES", 30)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList1, null), new 
TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2334,7 +2334,7 @@ public void testReadEntitiesMetricFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "MAP1_BYTES", 30)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList2, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2346,7 +2346,7 @@ public void testReadEntitiesMetricFilters() throws Exception { TimelineCompareOp.EQUAL, "dummy_metric", 5)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList3, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2358,7 +2358,7 @@ public void testReadEntitiesMetricFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "dummy_metric", 5)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList4, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2370,7 +2370,7 @@ public void testReadEntitiesMetricFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "dummy_metric", 5, false)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList5, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2388,7 +2388,7 @@ public void testReadEntitiesMetricFilterPrefix() throws Exception { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP1_")); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null), new TimelineDataToRetrieve(null, list, null, null)); @@ -2417,7 +2417,7 @@ public void testReadEntitiesMetricFilterPrefix() throws Exception { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP1_")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList1, null), new TimelineDataToRetrieve( @@ -2436,9 +2436,9 @@ public void testReadEntitiesMetricFilterPrefix() throws Exception { assertEquals(2, metricCnt); entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", - "some_flow_name", 1002345678919L, "application_1231111111_1111","world", - null), new TimelineEntityFilters(null, null, null, null, null, null, - null, metricFilterList1, null), new 
TimelineDataToRetrieve(null, + "some_flow_name", 1002345678919L, "application_1231111111_1111", + "world", null), new TimelineEntityFilters(null, null, null, null, null, + null, null, metricFilterList1, null), new TimelineDataToRetrieve(null, metricsToRetrieve, EnumSet.of(Field.METRICS), Integer.MAX_VALUE)); assertEquals(2, entities.size()); metricCnt = 0; @@ -2471,7 +2471,7 @@ public void testReadEntitiesInfoFilters() throws Exception { new TimelineFilterList(Operator.OR, list1, list2); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2487,7 +2487,7 @@ public void testReadEntitiesInfoFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "infoMapKey1", "infoMapValue1")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList1, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2505,7 +2505,7 @@ public void testReadEntitiesInfoFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "infoMapKey3", 71.4)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList2, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2516,7 +2516,7 @@ public void testReadEntitiesInfoFilters() throws Exception { TimelineCompareOp.EQUAL, "dummy_info", "some_value")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList3, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2527,7 +2527,7 @@ public void testReadEntitiesInfoFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList4, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2538,7 +2538,7 @@ public void testReadEntitiesInfoFilters() throws Exception { TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value", false)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList5, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2593,19 +2593,19 
@@ public void testReadApps() throws Exception { @Test public void testFilterAppsByCreatedTime() throws Exception { Set entities = reader.getEntities( - new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), - null), - new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null, - null, null, null, null, null), - new TimelineDataToRetrieve()); + new TimelineReaderContext("cluster1", "user1", "some_flow_name", + 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), + null), + new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null, + null, null, null, null, null), + new TimelineDataToRetrieve()); assertEquals(3, entities.size()); for (TimelineEntity entity : entities) { if (!entity.getId().equals("application_1111111111_2222") && !entity.getId().equals("application_1111111111_3333") && !entity.getId().equals("application_1111111111_4444")) { Assert.fail("Entities with ids' application_1111111111_2222, " + - "application_1111111111_3333 and application_1111111111_4444" + + "application_1111111111_3333 and application_1111111111_4444" + " should be present"); } } @@ -2736,8 +2736,8 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt1, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt1, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -2760,8 +2760,8 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt2, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt2, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); isRelatedToCnt = 0; @@ -2783,8 +2783,8 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt3, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt3, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -2807,8 +2807,8 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt4, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt4, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -2820,8 +2820,8 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt5, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt5, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -2841,8 
+2841,8 @@ public void testReadAppsIsRelatedTo() throws Exception { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt6, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt6, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -3335,8 +3335,8 @@ public void testReadAppsEventFilters() throws Exception { for (TimelineEntity timelineEntity : entities) { eventCnt += timelineEntity.getEvents().size(); if (!timelineEntity.getId().equals("application_1111111111_2222")) { - Assert.fail("Entity id should have been application_1111111111_2222"); - } + Assert.fail("Entity id should have been application_1111111111_2222"); + } } assertEquals(0, eventCnt); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java index 58d5e61c85..e34ae90b12 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java @@ -74,8 +74,8 @@ public static void cleanup() throws Exception { } private static PhoenixOfflineAggregationWriterImpl - setupPhoenixClusterAndWriterForTest(YarnConfiguration conf) - throws Exception{ + setupPhoenixClusterAndWriterForTest(YarnConfiguration conf) + throws Exception { Map props = new HashMap<>(); // Must update config before starting server props.put(QueryServices.STATS_USE_CURRENT_TIME_ATTRIB, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java index 0535a1316b..b6089873f4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java @@ -31,12 +31,14 @@ import org.apache.hadoop.conf.Configuration; /** - * Generates the data/entities for the FlowRun and FlowActivity Tables + * Generates the data/entities for the FlowRun and FlowActivity Tables. 
 */ -class TestFlowDataGenerator { +final class TestFlowDataGenerator { + private TestFlowDataGenerator() { + } - private static final String metric1 = "MAP_SLOT_MILLIS"; - private static final String metric2 = "HDFS_BYTES_READ"; + private static final String METRIC_1 = "MAP_SLOT_MILLIS"; + private static final String METRIC_2 = "HDFS_BYTES_READ"; public static final long END_TS_INCR = 10000L; static TimelineEntity getEntityMetricsApp1(long insertTs, Configuration c1) { @@ -51,7 +53,7 @@ static TimelineEntity getEntityMetricsApp1(long insertTs, Configuration c1) { // add metrics Set<TimelineMetric> metrics = new HashSet<>(); TimelineMetric m1 = new TimelineMetric(); - m1.setId(metric1); + m1.setId(METRIC_1); Map<Long, Number> metricValues = new HashMap<Long, Number>(); long ts = insertTs; @@ -64,10 +66,10 @@ static TimelineEntity getEntityMetricsApp1(long insertTs, Configuration c1) { metrics.add(m1); TimelineMetric m2 = new TimelineMetric(); - m2.setId(metric2); + m2.setId(METRIC_2); metricValues = new HashMap<Long, Number>(); ts = System.currentTimeMillis(); - for (int k=1; k< 100 ; k++) { + for (int k = 1; k < 100; k++) { metricValues.put(ts - k*100000L, 31L); } @@ -81,7 +83,8 @@ static TimelineEntity getEntityMetricsApp1(long insertTs, Configuration c1) { } - static TimelineEntity getEntityMetricsApp1Complete(long insertTs, Configuration c1) { + static TimelineEntity getEntityMetricsApp1Complete(long insertTs, + Configuration c1) { TimelineEntity entity = new TimelineEntity(); String id = "flowRunMetrics_test"; String type = TimelineEntityType.YARN_APPLICATION.toString(); @@ -93,7 +96,7 @@ static TimelineEntity getEntityMetricsApp1Complete(long insertTs, Configuration // add metrics Set<TimelineMetric> metrics = new HashSet<>(); TimelineMetric m1 = new TimelineMetric(); - m1.setId(metric1); + m1.setId(METRIC_1); Map<Long, Number> metricValues = new HashMap<Long, Number>(); long ts = insertTs; @@ -103,7 +106,7 @@ static TimelineEntity getEntityMetricsApp1Complete(long insertTs, Configuration metrics.add(m1); TimelineMetric m2 = new TimelineMetric(); - m2.setId(metric2); + m2.setId(METRIC_2); metricValues = new HashMap<Long, Number>(); ts = insertTs; metricValues.put(ts - 80000, 57L); @@ -134,7 +137,7 @@ static TimelineEntity getEntityMetricsApp1(long insertTs) { // add metrics Set<TimelineMetric> metrics = new HashSet<>(); TimelineMetric m1 = new TimelineMetric(); - m1.setId(metric1); + m1.setId(METRIC_1); Map<Long, Number> metricValues = new HashMap<Long, Number>(); long ts = insertTs; metricValues.put(ts - 100000, 2L); @@ -144,7 +147,7 @@ static TimelineEntity getEntityMetricsApp1(long insertTs) { metrics.add(m1); TimelineMetric m2 = new TimelineMetric(); - m2.setId(metric2); + m2.setId(METRIC_2); metricValues = new HashMap<Long, Number>(); ts = insertTs; metricValues.put(ts - 100000, 31L); @@ -177,7 +180,7 @@ static TimelineEntity getEntityMetricsApp2(long insertTs) { // add metrics Set<TimelineMetric> metrics = new HashSet<>(); TimelineMetric m1 = new TimelineMetric(); - m1.setId(metric1); + m1.setId(METRIC_1); Map<Long, Number> metricValues = new HashMap<Long, Number>(); long ts = insertTs; metricValues.put(ts - 100000, 5L); @@ -208,7 +211,7 @@ static TimelineEntity getEntity1() { // add metrics Set<TimelineMetric> metrics = new HashSet<>(); TimelineMetric m1 = new TimelineMetric(); - m1.setId(metric1); + m1.setId(METRIC_1); Map<Long, Number> metricValues = new HashMap<Long, Number>(); long ts = System.currentTimeMillis(); metricValues.put(ts - 120000, 100000000L); @@ -232,7 +235,7 @@ static TimelineEntity getEntity1() { event = new TimelineEvent(); event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE); - long expTs = cTime + 21600000;// start time + 6hrs + long expTs = cTime + 21600000; // start time + 6hrs 
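// Hedged aside, not part of the patch: 21600000 ms is the worked-out literal
// 6 * 60 * 60 * 1000, i.e. six hours. An equivalent, self-describing form
// (assumes a java.util.concurrent.TimeUnit import, which this file may not
// already have):
expTs = cTime + TimeUnit.HOURS.toMillis(6); // same value as the literal above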
event.setTimestamp(expTs); event.addInfo(expKey, expVal); entity.addEvent(event); @@ -250,7 +253,7 @@ static TimelineEntity getAFullEntity(long ts, long endTs) { // add metrics Set<TimelineMetric> metrics = new HashSet<>(); TimelineMetric m1 = new TimelineMetric(); - m1.setId(metric1); + m1.setId(METRIC_1); Map<Long, Number> metricValues = new HashMap<Long, Number>(); metricValues.put(ts - 120000, 100000000L); metricValues.put(ts - 100000, 200000000L); @@ -262,7 +265,7 @@ static TimelineEntity getAFullEntity(long ts, long endTs) { m1.setValues(metricValues); metrics.add(m1); TimelineMetric m2 = new TimelineMetric(); - m2.setId(metric2); + m2.setId(METRIC_2); metricValues = new HashMap<Long, Number>(); metricValues.put(ts - 900000, 31L); metricValues.put(ts - 30000, 57L); @@ -281,7 +284,7 @@ static TimelineEntity getAFullEntity(long ts, long endTs) { event = new TimelineEvent(); event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE); - long expTs = ts + 21600000;// start time + 6hrs + long expTs = ts + 21600000; // start time + 6hrs event.setTimestamp(expTs); event.addInfo(expKey, expVal); entity.addEvent(event); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java index 37490ff850..1906574b8c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java @@ -58,7 +58,7 @@ import org.junit.Test; /** - * Tests the FlowRun and FlowActivity Tables + * Tests the FlowRun and FlowActivity Tables. */ public class TestHBaseStorageFlowActivity { @@ -114,7 +114,7 @@ public void testWriteFlowRunMinMax() throws Exception { String appName = "application_100000000000_1111"; long minStartTs = 1424995200300L; long greaterStartTs = 1424995200300L + 864000L; - long endTs = 1424995200300L + 86000000L;; + long endTs = 1424995200300L + 86000000L; TimelineEntity entityMinStartTime = TestFlowDataGenerator .getEntityMinStartTime(minStartTs); @@ -209,7 +209,7 @@ public void testWriteFlowRunMinMax() throws Exception { /** * Write 1 application entity and checks the record for today in the flow - * activity table + * activity table. */ @Test public void testWriteFlowActivityOneFlow() throws Exception { @@ -313,10 +313,10 @@ private void checkFlowActivityTable(String cluster, String user, String flow, /** * Writes 3 applications each with a different run id and version for the same - * {cluster, user, flow} + * {cluster, user, flow}. * * They should be getting inserted into one record in the flow activity table - * with 3 columns, one per run id + * with 3 columns, one per run id. 
*/ @Test public void testFlowActivityTableOneFlowMultipleRunIds() throws IOException { @@ -425,7 +425,8 @@ private void checkFlowActivityTableSeveralRuns(String cluster, String user, s.setStartRow(startRow); String clusterStop = cluster + "1"; byte[] stopRow = - new FlowActivityRowKey(clusterStop, appCreatedTime, user, flow).getRowKey(); + new FlowActivityRowKey(clusterStop, appCreatedTime, user, flow) + .getRowKey(); s.setStopRow(stopRow); Connection conn = ConnectionFactory.createConnection(c1); Table table1 = conn.getTable(TableName diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java index 6c4c810758..74b9e501ca 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java @@ -69,7 +69,7 @@ import org.junit.Test; /** - * Tests the FlowRun and FlowActivity Tables + * Tests the FlowRun and FlowActivity Tables. */ public class TestHBaseStorageFlowRun { @@ -356,18 +356,20 @@ public void testWriteFlowRunMetricsOneFlow() throws Exception { /* * checks the batch limits on a scan */ - void checkFlowRunTableBatchLimit(String cluster, String user, + void checkFlowRunTableBatchLimit(String cluster, String user, String flow, long runid, Configuration c1) throws IOException { Scan s = new Scan(); s.addFamily(FlowRunColumnFamily.INFO.getBytes()); - byte[] startRow = new FlowRunRowKey(cluster, user, flow, runid).getRowKey(); + byte[] startRow = + new FlowRunRowKey(cluster, user, flow, runid).getRowKey(); s.setStartRow(startRow); // set a batch limit int batchLimit = 2; s.setBatch(batchLimit); String clusterStop = cluster + "1"; - byte[] stopRow = new FlowRunRowKey(clusterStop, user, flow, runid).getRowKey(); + byte[] stopRow = + new FlowRunRowKey(clusterStop, user, flow, runid).getRowKey(); s.setStopRow(stopRow); Connection conn = ConnectionFactory.createConnection(c1); Table table1 = conn diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java index 71523b81d6..30940886f3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java @@ -63,14 +63,14 @@ import org.junit.Test; /** - * Tests the FlowRun and FlowActivity Tables + * Tests the FlowRun and 
FlowActivity Tables. */ public class TestHBaseStorageFlowRunCompaction { private static HBaseTestingUtility util; - private static final String metric1 = "MAP_SLOT_MILLIS"; - private static final String metric2 = "HDFS_BYTES_READ"; + private static final String METRIC_1 = "MAP_SLOT_MILLIS"; + private static final String METRIC_2 = "HDFS_BYTES_READ"; private final byte[] aRowKey = Bytes.toBytes("a"); private final byte[] aFamily = Bytes.toBytes("family"); @@ -89,8 +89,8 @@ private static void createSchema() throws IOException { TimelineSchemaCreator.createAllTables(util.getConfiguration(), false); } - /** writes non numeric data into flow run table - * reads it back + /** Writes non numeric data into flow run table + * reads it back. * * @throws Exception */ @@ -262,7 +262,7 @@ public void testWriteScanBatchLimit() throws Exception { .getFamilyMap(FlowRunColumnFamily.INFO.getBytes()); // we expect all back in one next call assertEquals(4, values.size()); - System.out.println(" values size " + values.size() + " " + batchLimit ); + System.out.println(" values size " + values.size() + " " + batchLimit); rowCount++; } // should get back 1 row with each invocation @@ -325,11 +325,12 @@ public void testWriteFlowRunCompaction() throws Exception { .valueOf(FlowRunTable.DEFAULT_TABLE_NAME)); List regions = server.getOnlineRegions(TableName .valueOf(FlowRunTable.DEFAULT_TABLE_NAME)); - assertTrue("Didn't find any regions for primary table!", regions.size() > 0); + assertTrue("Didn't find any regions for primary table!", + regions.size() > 0); // flush and compact all the regions of the primary table for (Region region : regions) { - region.flush(true); - region.compact(true); + region.flush(true); + region.compact(true); } // check flow run for one flow many apps @@ -363,13 +364,13 @@ private void checkFlowRunTable(String cluster, String user, String flow, rowCount++; // check metric1 byte[] q = ColumnHelper.getColumnQualifier( - FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), metric1); + FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), METRIC_1); assertTrue(values.containsKey(q)); assertEquals(141, Bytes.toLong(values.get(q))); // check metric2 q = ColumnHelper.getColumnQualifier( - FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), metric2); + FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), METRIC_2); assertTrue(values.containsKey(q)); assertEquals(57, Bytes.toLong(values.get(q))); } @@ -385,7 +386,7 @@ private FlowScanner getFlowScannerForTestingCompaction() { // okay to pass in nulls for the constructor arguments // because all we want to do is invoke the process summation FlowScanner fs = new FlowScanner(null, null, - (request.isMajor() == true ? FlowScannerOperation.MAJOR_COMPACTION + (request.isMajor() ? 
FlowScannerOperation.MAJOR_COMPACTION : FlowScannerOperation.MINOR_COMPACTION)); assertNotNull(fs); return fs; @@ -404,7 +405,7 @@ public void checkProcessSummationMoreCellsSumFinal2() long currentTimestamp = System.currentTimeMillis(); long cell1Ts = 1200120L; long cell2Ts = TimestampGenerator.getSupplementedTimestamp( - System.currentTimeMillis(),"application_123746661110_11202"); + System.currentTimeMillis(), "application_123746661110_11202"); long cell3Ts = 1277719L; long cell4Ts = currentTimestamp - 10; @@ -571,7 +572,8 @@ public void checkProcessSummationMoreCellsSumFinalMany() throws IOException { // of type SUM and SUM_FINAL // NOT cells of SUM_FINAL will expire @Test - public void checkProcessSummationMoreCellsSumFinalVariedTags() throws IOException { + public void checkProcessSummationMoreCellsSumFinalVariedTags() + throws IOException { FlowScanner fs = getFlowScannerForTestingCompaction(); int countFinal = 20100; int countNotFinal = 1000; @@ -585,7 +587,8 @@ public void checkProcessSummationMoreCellsSumFinalVariedTags() throws IOExceptio long cellTsFinalStart = 10001120L; long cellTsFinal = cellTsFinalStart; - long cellTsFinalStartNotExpire = TimestampGenerator.getSupplementedTimestamp( + long cellTsFinalStartNotExpire = + TimestampGenerator.getSupplementedTimestamp( System.currentTimeMillis(), "application_10266666661166_118821"); long cellTsFinalNotExpire = cellTsFinalStartNotExpire; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java index 2dff9373b9..29ef1f8c29 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java @@ -53,7 +53,6 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity; import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType; import org.apache.hadoop.yarn.api.records.timelineservice.UserEntity; -import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.webapp.ForbiddenException; import org.apache.hadoop.yarn.webapp.NotFoundException; @@ -180,7 +179,7 @@ public Response putEntities( private static ApplicationId parseApplicationId(String appId) { try { if (appId != null) { - return ConverterUtils.toApplicationId(appId.trim()); + return ApplicationId.fromString(appId.trim()); } else { return null; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java index eda14e67c1..03f508f131 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java +++ 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java
index f5f7aa6ff6..4cb46e63a8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java
@@ -20,7 +20,6 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
  * Encodes and decodes {@link ApplicationId} for row keys.
@@ -50,7 +49,7 @@ public AppIdKeyConverter() {
    */
   @Override
   public byte[] encode(String appIdStr) {
-    ApplicationId appId = ConverterUtils.toApplicationId(appIdStr);
+    ApplicationId appId = ApplicationId.fromString(appIdStr);
     byte[] appIdBytes = new byte[getKeySize()];
     byte[] clusterTs = Bytes.toBytes(
         LongConverter.invertLong(appId.getClusterTimestamp()));
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
index 288046c3f3..d03b37dbb3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
@@ -21,7 +21,6 @@
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
  * Utility class that allows HBase coprocessors to interact with unique
@@ -99,7 +98,7 @@ private static long getAppIdSuffix(String appIdStr) {
     if (appIdStr == null) {
       return 0L;
     }
-    ApplicationId appId = ConverterUtils.toApplicationId(appIdStr);
+    ApplicationId appId = ApplicationId.fromString(appIdStr);
     long id = appId.getId() % TS_MULTIPLIER;
     return id;
   }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
index 0e9578a84d..bb0e33133e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
@@ -26,4 +26,3 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-
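
TimestampGenerator.getSupplementedTimestamp, used by the compaction tests earlier and touched above, folds an application-id-derived suffix into the cell timestamp so that two applications writing in the same millisecond get distinct HBase cell timestamps. A rough sketch of the scheme; the TS_MULTIPLIER value is an assumption for illustration, and the class name is hypothetical:

    import org.apache.hadoop.yarn.api.records.ApplicationId;

    public final class SupplementedTimestampSketch {
      // Assumed value; the real constant lives in TimestampGenerator.
      private static final long TS_MULTIPLIER = 1_000_000L;

      private SupplementedTimestampSketch() {
      }

      // Scale the wall-clock time and fold in a per-application suffix,
      // mirroring the getAppIdSuffix() logic shown in the hunk above.
      public static long getSupplementedTimestamp(long timestampMs,
          String appIdStr) {
        long suffix = appIdStr == null
            ? 0L : ApplicationId.fromString(appIdStr).getId() % TS_MULTIPLIER;
        return timestampMs * TS_MULTIPLIER + suffix;
      }

      // Recover the original millisecond timestamp.
      public static long getTruncatedTimestamp(long supplementedTs) {
        return supplementedTs / TS_MULTIPLIER;
      }
    }
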
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestNMTimelineCollectorManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestNMTimelineCollectorManager.java
index 854e046b4d..7bc89c582c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestNMTimelineCollectorManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestNMTimelineCollectorManager.java
@@ -88,9 +88,9 @@ public void testStartWebApp() throws Exception {
 
   @Test(timeout=60000)
   public void testMultithreadedAdd() throws Exception {
-    final int NUM_APPS = 5;
+    final int numApps = 5;
     List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
-    for (int i = 0; i < NUM_APPS; i++) {
+    for (int i = 0; i < numApps; i++) {
       final ApplicationId appId = ApplicationId.newInstance(0L, i);
       Callable<Boolean> task = new Callable<Boolean>() {
         public Boolean call() {
@@ -101,7 +101,7 @@ public Boolean call() {
       };
       tasks.add(task);
     }
-    ExecutorService executor = Executors.newFixedThreadPool(NUM_APPS);
+    ExecutorService executor = Executors.newFixedThreadPool(numApps);
     try {
       List<Future<Boolean>> futures = executor.invokeAll(tasks);
       for (Future<Boolean> future: futures) {
@@ -111,7 +111,7 @@ public Boolean call() {
       executor.shutdownNow();
     }
     // check the keys
-    for (int i = 0; i < NUM_APPS; i++) {
+    for (int i = 0; i < numApps; i++) {
       final ApplicationId appId = ApplicationId.newInstance(0L, i);
       assertTrue(collectorManager.containsTimelineCollector(appId));
     }
@@ -119,9 +119,9 @@
   @Test
   public void testMultithreadedAddAndRemove() throws Exception {
-    final int NUM_APPS = 5;
+    final int numApps = 5;
     List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
-    for (int i = 0; i < NUM_APPS; i++) {
+    for (int i = 0; i < numApps; i++) {
       final ApplicationId appId = ApplicationId.newInstance(0L, i);
       Callable<Boolean> task = new Callable<Boolean>() {
         public Boolean call() {
@@ -134,7 +134,7 @@ public Boolean call() {
       };
       tasks.add(task);
     }
-    ExecutorService executor = Executors.newFixedThreadPool(NUM_APPS);
+    ExecutorService executor = Executors.newFixedThreadPool(numApps);
     try {
       List<Future<Boolean>> futures = executor.invokeAll(tasks);
       for (Future<Boolean> future: futures) {
@@ -144,16 +144,16 @@ public Boolean call() {
       executor.shutdownNow();
     }
     // check the keys
-    for (int i = 0; i < NUM_APPS; i++) {
+    for (int i = 0; i < numApps; i++) {
       final ApplicationId appId = ApplicationId.newInstance(0L, i);
       assertFalse(collectorManager.containsTimelineCollector(appId));
     }
   }
 
   private NodeTimelineCollectorManager createCollectorManager() {
-    final NodeTimelineCollectorManager collectorManager =
+    final NodeTimelineCollectorManager cm =
         spy(new NodeTimelineCollectorManager());
-    doReturn(new Configuration()).when(collectorManager).getConfig();
+    doReturn(new Configuration()).when(cm).getConfig();
     CollectorNodemanagerProtocol nmCollectorService =
         mock(CollectorNodemanagerProtocol.class);
     GetTimelineCollectorContextResponse response =
@@ -164,7 +164,7 @@ private NodeTimelineCollectorManager createCollectorManager() {
     } catch (YarnException | IOException e) {
       fail();
     }
-    doReturn(nmCollectorService).when(collectorManager).getNMCollectorService();
-    return collectorManager;
+    doReturn(nmCollectorService).when(cm).getNMCollectorService();
+    return cm;
   }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeTimelineCollectorsAuxService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeTimelineCollectorsAuxService.java
index 7c2a4713cc..cb9ced0930 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeTimelineCollectorsAuxService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeTimelineCollectorsAuxService.java
@@ -111,7 +111,7 @@ public void testRemoveApplication() throws Exception {
     // a configured period
     assertTrue(auxService.hasApplication(appAttemptId.getApplicationId()));
     for (int i = 0; i < 4; i++) {
-      Thread.sleep(500l);
+      Thread.sleep(500L);
       if (!auxService.hasApplication(appAttemptId.getApplicationId())) {
         break;
       }
@@ -154,7 +154,7 @@ public void testLaunch() throws Exception {
 
   private PerNodeTimelineCollectorsAuxService
       createCollectorAndAddApplication() {
-    PerNodeTimelineCollectorsAuxService auxService = createCollector();
+    PerNodeTimelineCollectorsAuxService service = createCollector();
     // create an AM container
     ContainerId containerId = getAMContainerId();
     ContainerInitializationContext context =
@@ -162,17 +162,17 @@ public void testLaunch() throws Exception {
     when(context.getContainerId()).thenReturn(containerId);
     when(context.getContainerType()).thenReturn(
         ContainerType.APPLICATION_MASTER);
-    auxService.initializeContainer(context);
-    return auxService;
+    service.initializeContainer(context);
+    return service;
   }
 
   private PerNodeTimelineCollectorsAuxService createCollector() {
     NodeTimelineCollectorManager collectorManager = createCollectorManager();
-    PerNodeTimelineCollectorsAuxService auxService =
+    PerNodeTimelineCollectorsAuxService service =
         spy(new PerNodeTimelineCollectorsAuxService(collectorManager));
-    auxService.init(conf);
-    auxService.start();
-    return auxService;
+    service.init(conf);
+    service.start();
+    return service;
   }
 
   private NodeTimelineCollectorManager createCollectorManager() {
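
The two multithreaded tests above drive the collector manager from several threads at once via ExecutorService.invokeAll. A self-contained sketch of that pattern; the Callable body is a stand-in for the real add/remove calls, and all names here are illustrative:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    public final class InvokeAllSketch {
      private InvokeAllSketch() {
      }

      // Submit one Callable per application, wait for all of them, then
      // fold the per-task results into a single verdict.
      public static boolean runAll(int numApps) throws Exception {
        List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
        for (int i = 0; i < numApps; i++) {
          final int appIndex = i;
          tasks.add(new Callable<Boolean>() {
            public Boolean call() {
              return appIndex >= 0; // stand-in for the real collector call
            }
          });
        }
        ExecutorService executor = Executors.newFixedThreadPool(numApps);
        try {
          boolean allOk = true;
          for (Future<Boolean> future : executor.invokeAll(tasks)) {
            allOk &= future.get();
          }
          return allOk;
        } finally {
          executor.shutdownNow();
        }
      }
    }
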
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
index 791d6ab830..bc5eb9c5af 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
@@ -45,11 +45,11 @@ public void testSplitUsingEscapeAndDelimChar() throws Exception {
   public void testJoinAndEscapeStrings() throws Exception {
     assertEquals("*!cluster!*!b**o***!xer!oozie**",
         TimelineReaderUtils.joinAndEscapeStrings(
-            new String[] { "!cluster", "!b*o*!xer", "oozie*"}, '!', '*'));
+            new String[] {"!cluster", "!b*o*!xer", "oozie*"}, '!', '*'));
     assertEquals("*!cluster!*!b**o***!xer!!",
         TimelineReaderUtils.joinAndEscapeStrings(
-            new String[] { "!cluster", "!b*o*!xer", "", ""}, '!', '*'));
+            new String[] {"!cluster", "!b*o*!xer", "", ""}, '!', '*'));
     assertNull(TimelineReaderUtils.joinAndEscapeStrings(
-        new String[] { "!cluster", "!b*o*!xer", null, ""}, '!', '*'));
+        new String[] {"!cluster", "!b*o*!xer", null, ""}, '!', '*'));
   }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
index 4d6592257d..4ade024382 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
@@ -110,7 +110,8 @@ private static void verifyHttpResponse(Client client, URI uri,
         client.resource(uri).accept(MediaType.APPLICATION_JSON)
         .type(MediaType.APPLICATION_JSON).get(ClientResponse.class);
     assertNotNull(resp);
-    assertEquals(resp.getClientResponseStatus(), expectedStatus);
+    assertEquals(resp.getStatusInfo().getStatusCode(),
+        expectedStatus.getStatusCode());
   }
 
   private static Client createClient() {
@@ -126,10 +127,11 @@ private static ClientResponse getResponse(Client client, URI uri)
         client.resource(uri).accept(MediaType.APPLICATION_JSON)
         .type(MediaType.APPLICATION_JSON).get(ClientResponse.class);
     if (resp == null ||
-        resp.getClientResponseStatus() != ClientResponse.Status.OK) {
+        resp.getStatusInfo().getStatusCode() !=
+            ClientResponse.Status.OK.getStatusCode()) {
       String msg = new String();
       if (resp != null) {
-        msg = resp.getClientResponseStatus().toString();
+        msg = String.valueOf(resp.getStatusInfo().getStatusCode());
       }
       throw new IOException("Incorrect response from timeline reader. " +
           "Status=" + msg);
@@ -141,7 +143,8 @@ private static class DummyURLConnectionFactory
       implements HttpURLConnectionFactory {
 
     @Override
-    public HttpURLConnection getHttpURLConnection(final URL url) throws IOException {
+    public HttpURLConnection getHttpURLConnection(final URL url)
+        throws IOException {
       try {
         return (HttpURLConnection)url.openConnection();
       } catch (UndeclaredThrowableException e) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesUtils.java
index e991d27990..b2837c20c9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesUtils.java
@@ -779,8 +779,8 @@ public void testRelationFiltersParsing() throws Exception {
         ),
         new TimelineFilterList(
             new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
-                "type4", Sets.newHashSet((Object)"entity43","entity44",
-                    "entity47","entity49")),
+                "type4", Sets.newHashSet((Object)"entity43", "entity44",
+                    "entity47", "entity49")),
             new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
                 "type7", Sets.newHashSet((Object)"entity71"))
         )
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
index 8d6235dc10..d5e791b4e3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
@@ -60,7 +60,7 @@ public void testUIDEncodingDecoding() throws Exception {
         + "YARN_CONTAINER!container_1111111111_1111_01_000001", uid);
     assertEquals(
         context, TimelineUIDConverter.GENERIC_ENTITY_UID.decodeUID(uid));
-    context = new TimelineReaderContext("yarn_cluster",null, null, null,
+    context = new TimelineReaderContext("yarn_cluster", null, null, null,
         "application_1111111111_1111", "YARN_CONTAINER",
         "container_1111111111_1111_01_000001");
     uid = TimelineUIDConverter.GENERIC_ENTITY_UID.encodeUID(context);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
index 2af78177af..b58bbe358d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
@@ -58,9 +58,9 @@
 public class TestFileSystemTimelineReaderImpl {
 
-  private static final String rootDir =
+  private static final String ROOT_DIR =
       FileSystemTimelineReaderImpl.DEFAULT_TIMELINE_SERVICE_STORAGE_DIR_ROOT;
-  FileSystemTimelineReaderImpl reader;
+  private FileSystemTimelineReaderImpl reader;
 
   @BeforeClass
   public static void setup() throws Exception {
@@ -68,22 +68,22 @@ public static void setup() throws Exception {
     // Create app flow mapping file.
     CSVFormat format =
         CSVFormat.DEFAULT.withHeader("APP", "USER", "FLOW", "FLOWRUN");
-    String appFlowMappingFile = rootDir + "/entities/cluster1/" +
+    String appFlowMappingFile = ROOT_DIR + "/entities/cluster1/" +
         FileSystemTimelineReaderImpl.APP_FLOW_MAPPING_FILE;
     try (PrintWriter out = new PrintWriter(new BufferedWriter(
             new FileWriter(appFlowMappingFile, true)));
         CSVPrinter printer = new CSVPrinter(out, format)){
       printer.printRecord("app1", "user1", "flow1", 1);
-      printer.printRecord("app2","user1","flow1,flow",1);
+      printer.printRecord("app2", "user1", "flow1,flow", 1);
       printer.close();
     }
-    (new File(rootDir)).deleteOnExit();
+    (new File(ROOT_DIR)).deleteOnExit();
   }
 
   @AfterClass
   public static void tearDown() throws Exception {
-    FileUtils.deleteDirectory(new File(rootDir));
+    FileUtils.deleteDirectory(new File(ROOT_DIR));
   }
 
   @Before
@@ -91,7 +91,7 @@ public void init() throws Exception {
     reader = new FileSystemTimelineReaderImpl();
     Configuration conf = new YarnConfiguration();
     conf.set(FileSystemTimelineReaderImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
-        rootDir);
+        ROOT_DIR);
     reader.init(conf);
   }
 
@@ -112,7 +112,7 @@ private static void writeEntityFile(TimelineEntity entity, File dir)
   }
 
   private static void loadEntityData() throws Exception {
-    File appDir = new File(rootDir +
+    File appDir = new File(ROOT_DIR +
         "/entities/cluster1/user1/flow1/1/app1/app/");
     TimelineEntity entity11 = new TimelineEntity();
     entity11.setId("id_1");
@@ -138,7 +138,7 @@ private static void loadEntityData() throws Exception {
     metric2.addValue(1425016502016L, 34);
     metrics.add(metric2);
     entity11.setMetrics(metrics);
-    Map configs = new HashMap();
+    Map<String, String> configs = new HashMap<String, String>();
     configs.put("config_1", "127");
     entity11.setConfigs(configs);
     entity11.addRelatesToEntity("flow", "flow1");
@@ -179,7 +179,7 @@ private static void loadEntityData() throws Exception {
     Map<String, Object> info2 = new HashMap<String, Object>();
     info1.put("info2", 4);
     entity2.addInfo(info2);
-    Map configs2 = new HashMap();
+    Map<String, String> configs2 = new HashMap<String, String>();
     configs2.put("config_1", "129");
     configs2.put("config_3", "def");
     entity2.setConfigs(configs2);
@@ -216,7 +216,7 @@ private static void loadEntityData() throws Exception {
     info3.put("info2", 3.5);
     info3.put("info4", 20);
     entity3.addInfo(info3);
-    Map configs3 = new HashMap();
+    Map<String, String> configs3 = new HashMap<String, String>();
     configs3.put("config_1", "123");
     configs3.put("config_3", "abc");
     entity3.setConfigs(configs3);
@@ -254,7 +254,7 @@ private static void loadEntityData() throws Exception {
     entity4.addEvent(event44);
     writeEntityFile(entity4, appDir);
 
-    File appDir2 = new File(rootDir +
+    File appDir2 = new File(ROOT_DIR +
         "/entities/cluster1/user1/flow1,flow/1/app2/app/");
     TimelineEntity entity5 = new TimelineEntity();
     entity5.setId("id_5");
@@ -298,7 +298,7 @@ public void testGetEntityByClusterAndApp() throws Exception {
     Assert.assertEquals(0, result.getMetrics().size());
   }
 
-  /** This test checks whether we can handle commas in app flow mapping csv */
+  /** This test checks whether we can handle commas in app flow mapping csv. */
   @Test
   public void testAppFlowMappingCsv() throws Exception {
     // Test getting an entity by cluster and app where flow entry
@@ -317,7 +317,7 @@ public void testAppFlowMappingCsv() throws Exception {
   public void testGetEntityCustomFields() throws Exception {
     // Specified fields in addition to default view will be returned.
     TimelineEntity result = reader.getEntity(
-        new TimelineReaderContext("cluster1","user1", "flow1", 1L, "app1",
+        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
         "app", "id_1"),
         new TimelineDataToRetrieve(null, null,
         EnumSet.of(Field.INFO, Field.CONFIGS, Field.METRICS), null));
@@ -336,7 +336,7 @@ public void testGetEntityAllFields() throws Exception {
     // All fields of TimelineEntity will be returned.
     TimelineEntity result = reader.getEntity(
-        new TimelineReaderContext("cluster1","user1", "flow1", 1L, "app1",
+        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
         "app", "id_1"),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
     Assert.assertEquals(
@@ -381,9 +381,9 @@ public void testGetEntitiesWithLimit() throws Exception {
         "app", null),
         new TimelineEntityFilters(3L, null, null, null, null, null,
             null, null, null),
         new TimelineDataToRetrieve());
-     // Even though 2 entities out of 4 have same created time, one entity
-     // is left out due to limit
-     Assert.assertEquals(3, result.size());
+    // Even though 2 entities out of 4 have same created time, one entity
+    // is left out due to limit
+    Assert.assertEquals(3, result.size());
   }
 
   @Test
@@ -474,9 +474,9 @@ public void testGetFilteredEntities() throws Exception {
     // Get entities based on event filters.
     TimelineFilterList eventFilters = new TimelineFilterList();
     eventFilters.addFilter(
-        new TimelineExistsFilter(TimelineCompareOp.EQUAL,"event_2"));
+        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_2"));
     eventFilters.addFilter(
-        new TimelineExistsFilter(TimelineCompareOp.EQUAL,"event_4"));
+        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_4"));
     result = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
         "app", null),
@@ -642,7 +642,7 @@ public void testGetFilteredEntities() throws Exception {
         new TimelineEntityFilters(null, null, null, null, null, null,
             null, metricFilterList2, null),
         new TimelineDataToRetrieve());
-    Assert.assertEquals(1, result.size());
+    Assert.assertEquals(1, result.size());
     for (TimelineEntity entity : result) {
       if (!entity.getId().equals("id_1")) {
         Assert.fail("Incorrect filtering based on metric filters");
@@ -757,7 +757,7 @@ public void testGetFilteredEntities() throws Exception {
         Assert.fail("Incorrect filtering based on info filters");
       }
     }
-  }
+  }
 
   @Test
   public void testGetEntitiesByRelations() throws Exception {
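
testAppFlowMappingCsv above relies on commons-csv quoting any field that contains the delimiter, which is how the flow name "flow1,flow" survives as a single column in the app-flow mapping file. A minimal sketch under that assumption; the class name and output comment are illustrative:

    import java.io.IOException;
    import java.io.StringWriter;
    import org.apache.commons.csv.CSVFormat;
    import org.apache.commons.csv.CSVPrinter;

    public final class CsvQuotingSketch {
      private CsvQuotingSketch() {
      }

      public static String render() throws IOException {
        StringWriter out = new StringWriter();
        CSVFormat format =
            CSVFormat.DEFAULT.withHeader("APP", "USER", "FLOW", "FLOWRUN");
        try (CSVPrinter printer = new CSVPrinter(out, format)) {
          // The comma-bearing flow name is quoted on output, so a CSV
          // parser reads it back as one field rather than two.
          printer.printRecord("app2", "user1", "flow1,flow", 1);
        }
        return out.toString();
      }
    }
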
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineWriterImpl.java
index 2f79daa1c6..15be494984 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineWriterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineWriterImpl.java
@@ -41,7 +41,8 @@ public class TestFileSystemTimelineWriterImpl {
 
   /**
-   * Unit test for PoC YARN 3264
+   * Unit test for PoC YARN 3264.
+   *
    * @throws Exception
    */
   @Test
@@ -84,8 +85,8 @@ public void testWriteEntityToFile() throws Exception {
         "app_id", te);
 
     String fileName = fsi.getOutputRoot() +
-        "/entities/cluster_id/user_id/flow_name/flow_version/12345678/app_id/" +
-        type + "/" + id +
+        "/entities/cluster_id/user_id/flow_name/flow_version/12345678/" +
+        "app_id/" + type + "/" + id +
         FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
     Path path = Paths.get(fileName);
     File f = new File(fileName);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java
index 77370b789b..368b0604d5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestRowKeys.java
@@ -60,7 +60,8 @@ public class TestRowKeys {
     if (sepByteLen <= byteArr.length) {
       for (int i = 0; i < sepByteLen; i++) {
         byteArr[byteArr.length - sepByteLen + i] =
-            (byte) (longMaxByteArr[byteArr.length - sepByteLen + i] - QUALIFIER_SEP_BYTES[i]);
+            (byte) (longMaxByteArr[byteArr.length - sepByteLen + i] -
+                QUALIFIER_SEP_BYTES[i]);
       }
     }
     clusterTs = Bytes.toLong(byteArr);
@@ -73,7 +74,8 @@ private static void verifyRowPrefixBytes(byte[] byteRowKeyPrefix) {
     for (int i = 0; i < sepLen; i++) {
       assertTrue(
           "Row key prefix not encoded properly.",
-          byteRowKeyPrefix[byteRowKeyPrefix.length - sepLen + i] == QUALIFIER_SEP_BYTES[i]);
+          byteRowKeyPrefix[byteRowKeyPrefix.length - sepLen + i] ==
+              QUALIFIER_SEP_BYTES[i]);
     }
   }
 
@@ -94,9 +96,9 @@ public void testApplicationRowKey() {
         .getRowKeyPrefix();
     byte[][] splits =
         Separator.QUALIFIERS.split(byteRowKeyPrefix,
-            new int[] { Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
+            new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
                 Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                Separator.VARIABLE_SIZE });
+                Separator.VARIABLE_SIZE});
     assertEquals(5, splits.length);
     assertEquals(0, splits[4].length);
     assertEquals(FLOW_NAME,
@@ -153,10 +155,10 @@ public void testEntityRowKey() {
     byte[][] splits =
         Separator.QUALIFIERS.split(
             byteRowKeyPrefix,
-            new int[] { Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
+            new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
                 Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
                 AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE,
-                Separator.VARIABLE_SIZE });
+                Separator.VARIABLE_SIZE});
     assertEquals(7, splits.length);
     assertEquals(0, splits[6].length);
     assertEquals(APPLICATION_ID, new AppIdKeyConverter().decode(splits[4]));
@@ -170,9 +172,9 @@ public void testEntityRowKey() {
     splits =
         Separator.QUALIFIERS.split(
             byteRowKeyPrefix,
-            new int[] { Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
+            new int[] {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
                 Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE });
+                AppIdKeyConverter.getKeySize(), Separator.VARIABLE_SIZE});
     assertEquals(6, splits.length);
     assertEquals(0, splits[5].length);
     AppIdKeyConverter appIdKeyConverter = new AppIdKeyConverter();
@@ -207,8 +209,8 @@ public void testFlowActivityRowKey() {
         new FlowActivityRowKeyPrefix(CLUSTER, ts).getRowKeyPrefix();
     splits =
         Separator.QUALIFIERS.split(byteRowKeyPrefix,
-            new int[] { Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-                Separator.VARIABLE_SIZE });
+            new int[] {Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
+                Separator.VARIABLE_SIZE});
     assertEquals(3, splits.length);
     assertEquals(0, splits[2].length);
     assertEquals(CLUSTER,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java
index 27750f3c2a..7d37206bbd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestSeparator.java
@@ -86,7 +86,7 @@ public void testEncodedValues() {
     testEncodeDecode("Double-escape %2$ and %9$ or %%2$ or %%3$, nor %%%2$" +
         "= no problem!",
         Separator.QUALIFIERS, Separator.VALUES, Separator.SPACE,
         Separator.TAB);
-  }
+  }
 
   @Test
   public void testSplits() {
@@ -101,10 +101,10 @@ public void testSplits() {
       byte[] intVal1Arr = Bytes.add(sepByteArr, Bytes.copy(maxIntBytes,
           sepByteArr.length, Bytes.SIZEOF_INT - sepByteArr.length));
       byte[] arr = separator.join(
-          Bytes.toBytes(separator.encode(str1)),longVal1Arr,
+          Bytes.toBytes(separator.encode(str1)), longVal1Arr,
           Bytes.toBytes(separator.encode(str2)), intVal1Arr);
-      int[] sizes = { Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
-          Separator.VARIABLE_SIZE, Bytes.SIZEOF_INT };
+      int[] sizes = {Separator.VARIABLE_SIZE, Bytes.SIZEOF_LONG,
+          Separator.VARIABLE_SIZE, Bytes.SIZEOF_INT};
       byte[][] splits = separator.split(arr, sizes);
       assertEquals(4, splits.length);
       assertEquals(str1, separator.decode(Bytes.toString(splits[0])));
@@ -116,7 +116,7 @@ public void testSplits() {
           sepByteArr.length), sepByteArr);
       intVal1Arr = Bytes.add(Bytes.copy(maxIntBytes, 0,
           Bytes.SIZEOF_INT - sepByteArr.length), sepByteArr);
-      arr = separator.join(Bytes.toBytes(separator.encode(str1)),longVal1Arr,
+      arr = separator.join(Bytes.toBytes(separator.encode(str1)), longVal1Arr,
           Bytes.toBytes(separator.encode(str2)), intVal1Arr);
       splits = separator.split(arr, sizes);
       assertEquals(4, splits.length);
@@ -129,7 +129,7 @@ public void testSplits() {
           sepByteArr.length, 4 - sepByteArr.length), sepByteArr);
       longVal1Arr = Bytes.add(longVal1Arr, Bytes.copy(maxLongBytes, 4,
           3 - sepByteArr.length), sepByteArr);
-      arr = separator.join(Bytes.toBytes(separator.encode(str1)),longVal1Arr,
+      arr = separator.join(Bytes.toBytes(separator.encode(str1)), longVal1Arr,
           Bytes.toBytes(separator.encode(str2)), intVal1Arr);
       splits = separator.split(arr, sizes);
       assertEquals(4, splits.length);
@@ -140,8 +140,8 @@ public void testSplits() {
       arr = separator.join(Bytes.toBytes(separator.encode(str1)),
           Bytes.toBytes(separator.encode(str2)), intVal1Arr, longVal1Arr);
-      int[] sizes1 = { Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-          Bytes.SIZEOF_INT, Bytes.SIZEOF_LONG };
+      int[] sizes1 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
+          Bytes.SIZEOF_INT, Bytes.SIZEOF_LONG};
       splits = separator.split(arr, sizes1);
       assertEquals(4, splits.length);
       assertEquals(str1, separator.decode(Bytes.toString(splits[0])));
@@ -150,15 +150,15 @@ public void testSplits() {
       assertEquals(Bytes.toLong(longVal1Arr), Bytes.toLong(splits[3]));
 
       try {
-        int[] sizes2 = { Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
-            Bytes.SIZEOF_INT, 7 };
+        int[] sizes2 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE,
+            Bytes.SIZEOF_INT, 7};
         splits = separator.split(arr, sizes2);
         fail("Exception should have been thrown.");
       } catch (IllegalArgumentException e) {}
 
       try {
-        int[] sizes2 = { Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, 2,
-            Bytes.SIZEOF_LONG };
+        int[] sizes2 = {Separator.VARIABLE_SIZE, Separator.VARIABLE_SIZE, 2,
+            Bytes.SIZEOF_LONG};
         splits = separator.split(arr, sizes2);
         fail("Exception should have been thrown.");
       } catch (IllegalArgumentException e) {}
@@ -196,12 +196,12 @@ public void testJoinStripped() {
     split = Separator.VALUES.splitEncoded(joined);
     assertTrue(Iterables.elementsEqual(stringList, split));
 
-    String[] stringArray1 = { "else" };
+    String[] stringArray1 = {"else"};
     joined = Separator.VALUES.joinEncoded(stringArray1);
     split = Separator.VALUES.splitEncoded(joined);
     assertTrue(Iterables.elementsEqual(Arrays.asList(stringArray1), split));
 
-    String[] stringArray2 = { "d", "e?", "f" };
+    String[] stringArray2 = {"d", "e?", "f"};
     joined = Separator.VALUES.joinEncoded(stringArray2);
     split = Separator.VALUES.splitEncoded(joined);
     assertTrue(Iterables.elementsEqual(Arrays.asList(stringArray2), split));
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
index d1ef46be60..b6a0da4298 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
@@ -1197,4 +1197,3 @@ container ID. Similarly, application attempt can be queried by specifying entity
 1. If any problem occurs in parsing request, HTTP 400 (Bad Request) is returned.
 1. If flow context information cannot be retrieved or entity for the given entity id cannot be found, HTTP 404 (Not Found) is returned.
 1. For non-recoverable errors while retrieving data, HTTP 500 (Internal Server Error) is returned.
-
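
The TestRowKeys assertions above decode keys that interleave variable-size string fields with fixed 8-byte longs, the run id stored inverted so newer runs sort first. A toy sketch (not Hadoop code) of that layout; the separator byte and the Long.MAX_VALUE inversion are assumptions for illustration:

    import org.apache.hadoop.hbase.util.Bytes;

    public final class RowKeySketch {
      // Assumed single-byte separator; the real Separator escapes values too.
      private static final byte SEP = 0x00;

      private RowKeySketch() {
      }

      // cluster!user!flowName!invertedRunId, with the long occupying exactly
      // eight bytes so a reader can split on known field sizes.
      public static byte[] applicationRowKeyPrefix(String cluster, String user,
          String flowName, long flowRunId) {
        byte[] prefix = Bytes.add(Bytes.toBytes(cluster), new byte[] {SEP},
            Bytes.toBytes(user));
        prefix = Bytes.add(prefix, new byte[] {SEP}, Bytes.toBytes(flowName));
        // Invert so that higher (newer) run ids sort lexicographically first.
        return Bytes.add(prefix, new byte[] {SEP},
            Bytes.toBytes(Long.MAX_VALUE - flowRunId));
      }
    }
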