diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java index ec7ade7daa..164f19dc2f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java @@ -58,6 +58,7 @@ import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.util.ConverterUtils; /** @@ -290,11 +291,10 @@ private static void configureLocalDirs(Task task, JobConf job) throws IOExceptio private static void configureTask(JobConf job, Task task, Credentials credentials, Token jt) throws IOException { job.setCredentials(credentials); - - ApplicationAttemptId appAttemptId = - ConverterUtils.toContainerId( - System.getenv(Environment.CONTAINER_ID.name())) - .getApplicationAttemptId(); + + ApplicationAttemptId appAttemptId = ContainerId.fromString( + System.getenv(Environment.CONTAINER_ID.name())) + .getApplicationAttemptId(); LOG.debug("APPLICATION_ATTEMPT_ID: " + appAttemptId); // Set it in conf, so as to be able to be used the the OutputCommitter. job.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java index f8d54c57ba..c5070f3a92 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java @@ -1562,7 +1562,7 @@ public static void main(String[] args) { validateInputParam(appSubmitTimeStr, ApplicationConstants.APP_SUBMIT_TIME_ENV); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); ApplicationAttemptId applicationAttemptId = containerId.getApplicationAttemptId(); if (applicationAttemptId != null) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java index 6ee8e00163..0da3afb4cf 100755 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java @@ -713,8 +713,7 @@ private static LocalResource createLocalResource(FileSystem fc, Path file, LocalResourceType type, LocalResourceVisibility visibility) throws IOException { FileStatus fstat = 
fc.getFileStatus(file); - URL resourceURL = ConverterUtils.getYarnUrlFromPath(fc.resolvePath(fstat - .getPath())); + URL resourceURL = URL.fromPath(fc.resolvePath(fstat.getPath())); long resourceSize = fstat.getLen(); long resourceModificationTime = fstat.getModificationTime(); @@ -1247,8 +1246,8 @@ public void setAvataar(Avataar avataar) { public TaskAttemptStateInternal recover(TaskAttemptInfo taInfo, OutputCommitter committer, boolean recoverOutput) { ContainerId containerId = taInfo.getContainerId(); - NodeId containerNodeId = ConverterUtils.toNodeId(taInfo.getHostname() + ":" - + taInfo.getPort()); + NodeId containerNodeId = NodeId.fromString( + taInfo.getHostname() + ":" + taInfo.getPort()); String nodeHttpAddress = StringInterner.weakIntern(taInfo.getHostname() + ":" + taInfo.getHttpPort()); // Resource/Priority/Tokens are only needed while launching the container on diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java index d8e89b1cbc..892c626961 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java @@ -69,8 +69,10 @@ public TaskAttemptInfo(TaskAttempt ta, TaskType type, Boolean isRunning) { this.nodeHttpAddress = ta.getNodeHttpAddress(); this.startTime = report.getStartTime(); this.finishTime = report.getFinishTime(); - this.assignedContainerId = ConverterUtils.toString(report.getContainerId()); this.assignedContainer = report.getContainerId(); + if (assignedContainer != null) { + this.assignedContainerId = assignedContainer.toString(); + } this.progress = report.getProgress() * 100; this.status = report.getStateString(); this.state = report.getTaskAttemptState(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java index b43a7b43f0..6ba93e69c0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java @@ -254,7 +254,7 @@ public MRApp(ApplicationAttemptId appAttemptId, ContainerId amContainerId, // the job can reaches the final state when MRAppMaster shuts down. 
this.successfullyUnregistered.set(unregistered); this.assignedQueue = assignedQueue; - this.resource = Resource.newInstance(1234, 2); + this.resource = Resource.newInstance(1234L, 2L); } @Override diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java index 51164916d1..203958d2c1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java @@ -114,7 +114,7 @@ public static void setup() throws AccessControlException, localFS.delete(testDir, true); new File(testDir.toString()).mkdir(); } - + @Before public void prepare() throws IOException { File dir = new File(stagingDir); @@ -134,11 +134,11 @@ public void testMRAppMasterForDifferentUser() throws IOException, InterruptedException { String applicationAttemptIdStr = "appattempt_1317529182569_0004_000001"; String containerIdStr = "container_1317529182569_0004_000001_1"; - + String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMasterTest appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis()); @@ -161,15 +161,15 @@ public void testMRAppMasterMidLock() throws IOException, conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); conf.setInt(org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter. FILEOUTPUTCOMMITTER_ALGORITHM_VERSION, 1); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); FileSystem fs = FileSystem.get(conf); //Create the file, but no end file so we should unregister with an error. 
fs.create(start).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -200,8 +200,8 @@ public void testMRAppMasterJobLaunchTime() throws IOException, conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); conf.setInt(MRJobConfig.NUM_REDUCES, 0); conf.set(JHAdminConfig.MR_HS_JHIST_FORMAT, "json"); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); @@ -219,7 +219,7 @@ public void testMRAppMasterJobLaunchTime() throws IOException, FileSystem fs = FileSystem.get(conf); JobSplitWriter.createSplitFiles(new Path(dir.getAbsolutePath()), conf, fs, new org.apache.hadoop.mapred.InputSplit[0]); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMasterTestLaunchTime appMaster = new MRAppMasterTestLaunchTime(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis()); @@ -237,8 +237,8 @@ public void testMRAppMasterSuccessLock() throws IOException, String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); @@ -246,7 +246,7 @@ public void testMRAppMasterSuccessLock() throws IOException, FileSystem fs = FileSystem.get(conf); fs.create(start).close(); fs.create(end).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -266,7 +266,7 @@ public void testMRAppMasterSuccessLock() throws IOException, // verify the final status is SUCCEEDED verifyFailedStatus((MRAppMasterTest)appMaster, "SUCCEEDED"); } - + @Test public void testMRAppMasterFailLock() throws IOException, InterruptedException { @@ -275,8 +275,8 @@ public void testMRAppMasterFailLock() throws IOException, String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); @@ -284,7 +284,7 @@ public void testMRAppMasterFailLock() throws IOException, FileSystem fs = FileSystem.get(conf); fs.create(start).close(); fs.create(end).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId 
containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -304,7 +304,7 @@ public void testMRAppMasterFailLock() throws IOException, // verify the final status is FAILED verifyFailedStatus((MRAppMasterTest)appMaster, "FAILED"); } - + @Test public void testMRAppMasterMissingStaging() throws IOException, InterruptedException { @@ -313,16 +313,16 @@ public void testMRAppMasterMissingStaging() throws IOException, String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); //Delete the staging directory File dir = new File(stagingDir); if(dir.exists()) { FileUtils.deleteDirectory(dir); } - - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -353,9 +353,9 @@ public void testMRAppMasterMaxAppAttempts() throws IOException, String containerIdStr = "container_1317529182569_0004_000002_1"; String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); @@ -427,7 +427,7 @@ public void testMRAppMasterCredentials() throws Exception { new Token(identifier, password, AMRMTokenIdentifier.KIND_NAME, appTokenService); credentials.addToken(appTokenService, appToken); - + Text keyAlias = new Text("mySecretKeyAlias"); credentials.addSecretKey(keyAlias, "mySecretKey".getBytes()); Token storedToken = @@ -488,7 +488,7 @@ public void testMRAppMasterCredentials() throws Exception { Assert.assertEquals(storedToken, confCredentials.getToken(tokenAlias)); Assert.assertEquals("mySecretKey", new String(confCredentials.getSecretKey(keyAlias))); - + // Verify the AM's ugi - app token should be present Credentials ugiCredentials = appMaster.getUgi().getCredentials(); Assert.assertEquals(1, ugiCredentials.numberOfSecretKeys()); @@ -507,9 +507,9 @@ public void testMRAppMasterShutDownJob() throws Exception, String applicationAttemptIdStr = "appattempt_1317529182569_0004_000002"; String containerIdStr = "container_1317529182569_0004_000002_1"; String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); @@ -591,7 +591,7 @@ protected void serviceInit(Configuration conf) throws Exception { } this.conf = conf; } - + @Override protected ContainerAllocator 
createContainerAllocator( final ClientService clientService, final AppContext context) { @@ -628,7 +628,7 @@ protected void serviceStart() throws Exception { public Credentials getCredentials() { return super.getCredentials(); } - + public UserGroupInformation getUgi() { return currentUser; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java index a4853d5e42..b099bcce23 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java @@ -129,8 +129,8 @@ public void testCommitWindow() throws Exception { SystemClock clock = SystemClock.getInstance(); AppContext appContext = mock(AppContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); when(appContext.getApplicationID()).thenReturn(attemptid.getApplicationId()); when(appContext.getApplicationAttemptId()).thenReturn(attemptid); when(appContext.getEventHandler()).thenReturn( @@ -240,8 +240,8 @@ public void testBasic() throws Exception { YarnConfiguration conf = new YarnConfiguration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); JobContext mockJobContext = mock(JobContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(attemptid.getApplicationId())); @@ -288,8 +288,8 @@ public void testFailure() throws Exception { YarnConfiguration conf = new YarnConfiguration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); JobContext mockJobContext = mock(JobContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = + ApplicationAttemptId.fromString("appattempt_1234567890000_0001_0"); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(attemptid.getApplicationId())); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java index 36221e0500..eaa5af76d5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java @@ -942,8 +942,8 @@ public void runOnNextHeartbeat(Runnable callback) { callback.run(); } }; - ApplicationAttemptId id = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId id = 
ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); when(appContext.getApplicationID()).thenReturn(id.getApplicationId()); when(appContext.getApplicationAttemptId()).thenReturn(id); CommitterEventHandler handler = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java index dcd5d2954b..3c9127fd11 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java @@ -515,7 +515,7 @@ public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype, WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag, diagnostics); WebServicesTestUtils.checkStringMatch("assignedContainerId", - ConverterUtils.toString(ta.getAssignedContainerID()), + ta.getAssignedContainerID().toString(), assignedContainerId); assertEquals("startTime wrong", ta.getLaunchTime(), startTime); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java index 3b87197e1b..c58a774f87 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java @@ -157,7 +157,7 @@ public void setup(JobConf conf) throws IOException { } Path resourcePath; try { - resourcePath = ConverterUtils.getPathFromYarnURL(resource.getResource()); + resourcePath = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException(e); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java index c8d8a44555..31e4c0fd4f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java @@ -68,6 +68,7 @@ import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.util.Apps; @@ -569,14 +570,13 @@ private static void parseDistributedCacheArtifacts( } String linkName = name.toUri().getPath(); LocalResource orig = localResources.get(linkName); - if(orig != null 
&& !orig.getResource().equals( - ConverterUtils.getYarnUrlFromURI(p.toUri()))) { + if(orig != null && !orig.getResource().equals(URL.fromURI(p.toUri()))) { throw new InvalidJobConfException( getResourceDescription(orig.getType()) + orig.getResource() + " conflicts with " + getResourceDescription(type) + u); } - localResources.put(linkName, LocalResource.newInstance(ConverterUtils - .getYarnUrlFromURI(p.toUri()), type, visibilities[i] + localResources.put(linkName, LocalResource + .newInstance(URL.fromURI(p.toUri()), type, visibilities[i] ? LocalResourceVisibility.PUBLIC : LocalResourceVisibility.PRIVATE, sizes[i], timestamps[i])); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java index ea2ca9e90f..266aa94f0c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java @@ -107,8 +107,8 @@ public void setDatum(Object datum) { * @return the ApplicationAttemptId */ public ApplicationAttemptId getAppAttemptId() { - return ConverterUtils.toApplicationAttemptId(datum.getApplicationAttemptId() - .toString()); + return ApplicationAttemptId.fromString( + datum.getApplicationAttemptId().toString()); } /** @@ -122,7 +122,7 @@ public long getStartTime() { * @return the ContainerId for the MRAppMaster. */ public ContainerId getContainerId() { - return ConverterUtils.toContainerId(datum.getContainerId().toString()); + return ContainerId.fromString(datum.getContainerId().toString()); } /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java index c8c250a607..3073d5b95f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java @@ -75,7 +75,7 @@ public TaskAttemptStartedEvent(TaskAttemptID attemptId, TaskType taskType, long startTime, String trackerName, int httpPort, int shufflePort, String locality, String avataar) { this(attemptId, taskType, startTime, trackerName, httpPort, shufflePort, - ConverterUtils.toContainerId("container_-1_-1_-1_-1"), locality, + ContainerId.fromString("container_-1_-1_-1_-1"), locality, avataar); } @@ -116,7 +116,7 @@ public EventType getEventType() { } /** Get the ContainerId */ public ContainerId getContainerId() { - return ConverterUtils.toContainerId(datum.getContainerId().toString()); + return ContainerId.fromString(datum.getContainerId().toString()); } /** Get the locality */ public String getLocality() { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java index 60dc235d68..54c2792b12 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java @@ -533,7 +533,7 @@ public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype, WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag, diagnostics); WebServicesTestUtils.checkStringMatch("assignedContainerId", - ConverterUtils.toString(ta.getAssignedContainerID()), + ta.getAssignedContainerID().toString(), assignedContainerId); assertEquals("startTime wrong", ta.getLaunchTime(), startTime); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java index 1342282784..b30641ebb2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java @@ -321,7 +321,7 @@ private LocalResource createApplicationResource(FileContext fs, Path p, LocalRes throws IOException { LocalResource rsrc = recordFactory.newRecordInstance(LocalResource.class); FileStatus rsrcStat = fs.getFileStatus(p); - rsrc.setResource(ConverterUtils.getYarnUrlFromPath(fs + rsrc.setResource(URL.fromPath(fs .getDefaultFileSystem().resolvePath(rsrcStat.getPath()))); rsrc.setSize(rsrcStat.getLen()); rsrc.setTimestamp(rsrcStat.getModificationTime()); @@ -355,8 +355,7 @@ public ApplicationSubmissionContext createApplicationSubmissionContext( Path jobConfPath = new Path(jobSubmitDir, MRJobConfig.JOB_CONF_FILE); - URL yarnUrlForJobSubmitDir = ConverterUtils - .getYarnUrlFromPath(defaultFileContext.getDefaultFileSystem() + URL yarnUrlForJobSubmitDir = URL.fromPath(defaultFileContext.getDefaultFileSystem() .resolvePath( defaultFileContext.makeQualified(new Path(jobSubmitDir)))); LOG.debug("Creating setup context, jobSubmitDir url is " diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java index a6647f19da..900bdeb20e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java @@ -749,7 +749,7 @@ public void testContainerRollingLog() throws IOException, boolean foundAppMaster = job.isUber(); final Path containerPathComponent = slog.getPath().getParent(); if (!foundAppMaster) { - final ContainerId cid = ConverterUtils.toContainerId( + final ContainerId cid = ContainerId.fromString( containerPathComponent.getName()); foundAppMaster = ((cid.getContainerId() & 
ContainerId.CONTAINER_ID_BITMASK)== 1); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java index 0d6e900b39..8cbae81995 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java @@ -999,7 +999,7 @@ private String getBaseLocation(String jobId, String user) { final String baseStr = ContainerLocalizer.USERCACHE + "/" + user + "/" + ContainerLocalizer.APPCACHE + "/" - + ConverterUtils.toString(appID) + "/output" + "/"; + + appID.toString() + "/output" + "/"; return baseStr; } diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java index c502ffd173..2e44070136 100644 --- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java +++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java @@ -39,6 +39,7 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.LogAggregationStatus; import org.apache.hadoop.yarn.applications.distributedshell.ApplicationMaster; @@ -302,7 +303,7 @@ void filterAppsByAggregatedStatus() throws IOException, YarnException { AppInfo app = it.next(); try { ApplicationReport report = client.getApplicationReport( - ConverterUtils.toApplicationId(app.getAppId())); + ApplicationId.fromString(app.getAppId())); LogAggregationStatus aggStatus = report.getLogAggregationStatus(); if (aggStatus.equals(LogAggregationStatus.RUNNING) || aggStatus.equals(LogAggregationStatus.RUNNING_WITH_FAILURE) || diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java index 0a83bc047a..5f3a68ebe1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java @@ -19,6 +19,8 @@ package org.apache.hadoop.yarn.api.records; import java.text.NumberFormat; +import java.util.Iterator; +import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -26,6 +28,8 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.util.Records; +import com.google.common.base.Splitter; + /** *

<p><code>ApplicationAttemptId</code> denotes the particular <em>attempt</em> * of an <code>ApplicationMaster</code> for a given {@link ApplicationId}.</p>

@@ -38,10 +42,11 @@ @Stable public abstract class ApplicationAttemptId implements Comparable { + private static Splitter _spliter = Splitter.on('_').trimResults(); @Private @Unstable - public static final String appAttemptIdStrPrefix = "appattempt_"; + public static final String appAttemptIdStrPrefix = "appattempt"; @Public @Unstable @@ -131,6 +136,7 @@ public int compareTo(ApplicationAttemptId other) { @Override public String toString() { StringBuilder sb = new StringBuilder(appAttemptIdStrPrefix); + sb.append("_"); sb.append(this.getApplicationId().getClusterTimestamp()).append("_"); sb.append(ApplicationId.appIdFormat.get().format( this.getApplicationId().getId())); @@ -139,4 +145,32 @@ public String toString() { } protected abstract void build(); + + @Public + @Stable + public static ApplicationAttemptId fromString(String applicationAttemptIdStr) { + Iterator it = _spliter.split(applicationAttemptIdStr).iterator(); + if (!it.next().equals(appAttemptIdStrPrefix)) { + throw new IllegalArgumentException("Invalid AppAttemptId prefix: " + + applicationAttemptIdStr); + } + try { + return toApplicationAttemptId(it); + } catch (NumberFormatException n) { + throw new IllegalArgumentException("Invalid AppAttemptId: " + + applicationAttemptIdStr, n); + } catch (NoSuchElementException e) { + throw new IllegalArgumentException("Invalid AppAttemptId: " + + applicationAttemptIdStr, e); + } + } + + private static ApplicationAttemptId toApplicationAttemptId( + Iterator it) throws NumberFormatException { + ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), + Integer.parseInt(it.next())); + ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(appId, Integer.parseInt(it.next())); + return appAttemptId; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java index 90214cd8fc..03a77ce309 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java @@ -19,6 +19,8 @@ package org.apache.hadoop.yarn.api.records; import java.text.NumberFormat; +import java.util.Iterator; +import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -26,6 +28,8 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.util.Records; +import com.google.common.base.Splitter; + /** *

<p><code>ApplicationId</code> represents the <em>globally unique</em> * identifier for an application.</p>

@@ -38,10 +42,11 @@ @Public @Stable public abstract class ApplicationId implements Comparable { + private static Splitter _spliter = Splitter.on('_').trimResults(); @Private @Unstable - public static final String appIdStrPrefix = "application_"; + public static final String appIdStrPrefix = "application"; @Public @Unstable @@ -105,8 +110,35 @@ public int compareTo(ApplicationId other) { @Override public String toString() { - return appIdStrPrefix + this.getClusterTimestamp() + "_" - + appIdFormat.get().format(getId()); + return appIdStrPrefix + "_" + this.getClusterTimestamp() + "_" + appIdFormat + .get().format(getId()); + } + + private static ApplicationId toApplicationId( + Iterator it) throws NumberFormatException { + ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), + Integer.parseInt(it.next())); + return appId; + } + + @Public + @Stable + public static ApplicationId fromString(String appIdStr) { + Iterator it = _spliter.split((appIdStr)).iterator(); + if (!it.next().equals(appIdStrPrefix)) { + throw new IllegalArgumentException("Invalid ApplicationId prefix: " + + appIdStr + ". The valid ApplicationId should start with prefix " + + appIdStrPrefix); + } + try { + return toApplicationId(it); + } catch (NumberFormatException n) { + throw new IllegalArgumentException("Invalid ApplicationId: " + + appIdStr, n); + } catch (NoSuchElementException e) { + throw new IllegalArgumentException("Invalid ApplicationId: " + + appIdStr, e); + } } @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java index f332651daf..feddeca9e7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java @@ -42,7 +42,7 @@ public abstract class ContainerId implements Comparable{ private static final String CONTAINER_PREFIX = "container"; private static final String EPOCH_PREFIX = "e"; - @Private + @Public @Unstable public static ContainerId newContainerId(ApplicationAttemptId appAttemptId, long containerId) { @@ -97,7 +97,7 @@ public static ContainerId newInstance(ApplicationAttemptId appAttemptId, */ @Public @Deprecated - @Stable + @Unstable public abstract int getId(); /** @@ -205,7 +205,7 @@ public String toString() { } @Public - @Unstable + @Stable public static ContainerId fromString(String containerIdStr) { Iterator it = _SPLITTER.split(containerIdStr).iterator(); if (!it.next().equals(CONTAINER_PREFIX)) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java index c3f859598f..a0b87a7be6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java @@ -20,8 +20,8 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import 
org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.yarn.util.Records; /** @@ -35,8 +35,8 @@ @Stable public abstract class NodeId implements Comparable { - @Private - @Unstable + @Public + @Stable public static NodeId newInstance(String host, int port) { NodeId nodeId = Records.newRecord(NodeId.class); nodeId.setHost(host); @@ -112,6 +112,23 @@ public int compareTo(NodeId other) { } return hostCompare; } + + @Public + @Stable + public static NodeId fromString(String nodeIdStr) { + String[] parts = nodeIdStr.split(":"); + if (parts.length != 2) { + throw new IllegalArgumentException("Invalid NodeId [" + nodeIdStr + + "]. Expected host:port"); + } + try { + NodeId nodeId = + NodeId.newInstance(parts[0].trim(), Integer.parseInt(parts[1])); + return nodeId; + } catch (NumberFormatException e) { + throw new IllegalArgumentException("Invalid port: " + parts[1], e); + } + } protected abstract void build(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java index 4261117b10..aa28585ab1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java @@ -18,8 +18,13 @@ package org.apache.hadoop.yarn.api.records; +import java.net.URI; +import java.net.URISyntaxException; + import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Stable; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.util.Records; /** @@ -119,4 +124,48 @@ public static URL newInstance(String scheme, String host, int port, String file) @Public @Stable public abstract void setFile(String file); + + @Public + @Stable + public Path toPath() throws URISyntaxException { + String scheme = getScheme() == null ? 
"" : getScheme(); + + String authority = ""; + if (getHost() != null) { + authority = getHost(); + if (getUserInfo() != null) { + authority = getUserInfo() + "@" + authority; + } + if (getPort() > 0) { + authority += ":" + getPort(); + } + } + + return new Path( + (new URI(scheme, authority, getFile(), null, null)).normalize()); + } + + @Public + @Stable + public static URL fromURI(URI uri) { + URL url = + RecordFactoryProvider.getRecordFactory(null).newRecordInstance( + URL.class); + if (uri.getHost() != null) { + url.setHost(uri.getHost()); + } + if (uri.getUserInfo() != null) { + url.setUserInfo(uri.getUserInfo()); + } + url.setPort(uri.getPort()); + url.setScheme(uri.getScheme()); + url.setFile(uri.getPath()); + return url; + } + + @Public + @Stable + public static URL fromPath(Path path) { + return fromURI(path.toUri()); + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java index 5e2c90b429..703595c9a5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java @@ -435,13 +435,13 @@ public boolean init(String[] args) throws ParseException, IOException { if (!envs.containsKey(Environment.CONTAINER_ID.name())) { if (cliParser.hasOption("app_attempt_id")) { String appIdStr = cliParser.getOptionValue("app_attempt_id", ""); - appAttemptID = ConverterUtils.toApplicationAttemptId(appIdStr); + appAttemptID = ApplicationAttemptId.fromString(appIdStr); } else { throw new IllegalArgumentException( "Application Attempt Id not set in the environment"); } } else { - ContainerId containerId = ConverterUtils.toContainerId(envs + ContainerId containerId = ContainerId.fromString(envs .get(Environment.CONTAINER_ID.name())); appAttemptID = containerId.getApplicationAttemptId(); } @@ -1048,8 +1048,7 @@ public void run() { URL yarnUrl = null; try { - yarnUrl = ConverterUtils.getYarnUrlFromURI( - new URI(renamedScriptPath.toString())); + yarnUrl = URL.fromURI(new URI(renamedScriptPath.toString())); } catch (URISyntaxException e) { LOG.error("Error when trying to use shell script path specified" + " in env, path=" + renamedScriptPath, e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java index 5adc37d825..9879b1e9dd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java @@ -68,6 +68,7 @@ import 
org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain; @@ -857,7 +858,7 @@ private void addToLocalResources(FileSystem fs, String fileSrcPath, FileStatus scFileStatus = fs.getFileStatus(dst); LocalResource scRsrc = LocalResource.newInstance( - ConverterUtils.getYarnUrlFromURI(dst.toUri()), + URL.fromURI(dst.toUri()), LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime()); localResources.put(fileDstPath, scRsrc); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java index 55fbd60b18..119fa6f3bd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java @@ -53,7 +53,7 @@ public Set getTimelineEntityGroupId(String entityType, public Set getTimelineEntityGroupId(String entityId, String entityType) { if (ApplicationMaster.DSEntity.DS_CONTAINER.toString().equals(entityId)) { - ContainerId containerId = ConverterUtils.toContainerId(entityId); + ContainerId containerId = ContainerId.fromString(entityId); ApplicationId appId = containerId.getApplicationAttemptId() .getApplicationId(); return toEntityGroupId(appId.toString()); @@ -69,7 +69,7 @@ public Set getTimelineEntityGroupId(String entityType, } private Set toEntityGroupId(String strAppId) { - ApplicationId appId = ConverterUtils.toApplicationId(strAppId); + ApplicationId appId = ApplicationId.fromString(strAppId); TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance( appId, ApplicationMaster.CONTAINER_ENTITY_GROUP_ID); Set result = new HashSet<>(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java index 2b46fca4b4..9448cf14bc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java @@ -371,8 +371,8 @@ public void run() { } String currAttemptEntityId = entitiesAttempts.getEntities().get(0).getEntityId(); - ApplicationAttemptId 
attemptId - = ConverterUtils.toApplicationAttemptId(currAttemptEntityId); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString( + currAttemptEntityId); NameValuePair primaryFilter = new NameValuePair( ApplicationMaster.APPID_TIMELINE_FILTER_NAME, attemptId.getApplicationId().toString()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java index d9e9fa6f94..865ce005fc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java @@ -301,7 +301,7 @@ public int run(String[] args) throws Exception { */ private void signalToContainer(String containerIdStr, SignalContainerCommand command) throws YarnException, IOException { - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); sysout.println("Signalling container " + containerIdStr); client.signalToContainer(containerId, command); } @@ -327,8 +327,8 @@ private int printApplicationAttemptReport(String applicationAttemptId) throws YarnException, IOException { ApplicationAttemptReport appAttemptReport = null; try { - appAttemptReport = client.getApplicationAttemptReport(ConverterUtils - .toApplicationAttemptId(applicationAttemptId)); + appAttemptReport = client.getApplicationAttemptReport( + ApplicationAttemptId.fromString(applicationAttemptId)); } catch (ApplicationNotFoundException e) { sysout.println("Application for AppAttempt with id '" + applicationAttemptId + "' doesn't exist in RM or Timeline Server."); @@ -384,8 +384,7 @@ private int printContainerReport(String containerId) throws YarnException, IOException { ContainerReport containerReport = null; try { - containerReport = client.getContainerReport((ConverterUtils - .toContainerId(containerId))); + containerReport = client.getContainerReport(ContainerId.fromString(containerId)); } catch (ApplicationNotFoundException e) { sysout.println("Application for Container with id '" + containerId + "' doesn't exist in RM or Timeline Server."); @@ -515,7 +514,7 @@ private int killApplication(String[] applicationIds) throws YarnException, */ private void killApplication(String applicationId) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); ApplicationReport appReport = null; try { appReport = client.getApplicationReport(appId); @@ -540,7 +539,7 @@ private void killApplication(String applicationId) throws YarnException, */ private void moveApplicationAcrossQueues(String applicationId, String queue) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); ApplicationReport appReport = client.getApplicationReport(appId); if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED || appReport.getYarnApplicationState() == YarnApplicationState.KILLED @@ -565,7 +564,7 @@ private void failApplicationAttempt(String attemptId) throws YarnException, IOException { ApplicationId appId; ApplicationAttemptId attId; - attId = ConverterUtils.toApplicationAttemptId(attemptId); + 
attId = ApplicationAttemptId.fromString(attemptId); appId = attId.getApplicationId(); sysout.println("Failing attempt " + attId + " of application " + appId); @@ -583,8 +582,8 @@ private int printApplicationReport(String applicationId) throws YarnException, IOException { ApplicationReport appReport = null; try { - appReport = client.getApplicationReport(ConverterUtils - .toApplicationId(applicationId)); + appReport = client.getApplicationReport( + ApplicationId.fromString(applicationId)); } catch (ApplicationNotFoundException e) { sysout.println("Application with id '" + applicationId + "' doesn't exist in RM or Timeline Server."); @@ -684,7 +683,7 @@ private void listApplicationAttempts(String applicationId) throws YarnException, new OutputStreamWriter(sysout, Charset.forName("UTF-8"))); List appAttemptsReport = client - .getApplicationAttempts(ConverterUtils.toApplicationId(applicationId)); + .getApplicationAttempts(ApplicationId.fromString(applicationId)); writer.println("Total number of application attempts " + ":" + appAttemptsReport.size()); writer.printf(APPLICATION_ATTEMPTS_PATTERN, "ApplicationAttempt-Id", @@ -711,8 +710,8 @@ private void listContainers(String appAttemptId) throws YarnException, PrintWriter writer = new PrintWriter( new OutputStreamWriter(sysout, Charset.forName("UTF-8"))); - List appsReport = client - .getContainers(ConverterUtils.toApplicationAttemptId(appAttemptId)); + List appsReport = client.getContainers( + ApplicationAttemptId.fromString(appAttemptId)); writer.println("Total number of containers " + ":" + appsReport.size()); writer.printf(CONTAINER_PATTERN, "Container-Id", "Start Time", "Finish Time", "State", "Host", "Node Http Address", "LOG-URL"); @@ -735,7 +734,7 @@ private void listContainers(String appAttemptId) throws YarnException, */ private void updateApplicationPriority(String applicationId, String priority) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); Priority newAppPriority = Priority.newInstance(Integer.parseInt(priority)); sysout.println("Updating priority of an application " + applicationId); Priority updateApplicationPriority = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java index d62ee5ee40..4fdb57b6bc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java @@ -153,7 +153,7 @@ public int run(String[] args) throws Exception { ApplicationId appId = null; try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (Exception e) { System.err.println("Invalid ApplicationId specified"); return -1; @@ -456,8 +456,8 @@ public ContainerReport getContainerReport(String containerIdStr) throws YarnException, IOException { YarnClient yarnClient = createYarnClient(); try { - return yarnClient.getContainerReport(ConverterUtils - .toContainerId(containerIdStr)); + return yarnClient.getContainerReport( + ContainerId.fromString(containerIdStr)); } finally { yarnClient.close(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java index a89551f9c5..f51fee929c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java @@ -243,7 +243,7 @@ private void listDetailedClusterNodes(Set nodeStates) */ private void printNodeStatus(String nodeIdStr) throws YarnException, IOException { - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); List nodesReport = client.getNodeReports(); // Use PrintWriter.println, which uses correct platform line ending. ByteArrayOutputStream baos = new ByteArrayOutputStream(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java index d407c206f5..aa7fc30344 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java @@ -427,7 +427,7 @@ private int updateNodeResource(String nodeIdStr, int memSize, ResourceManagerAdministrationProtocol adminProtocol = createAdminProtocol(); UpdateNodeResourceRequest request = recordFactory.newRecordInstance(UpdateNodeResourceRequest.class); - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); Resource resource = Resources.createResource(memSize, cores); Map resourceMap = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java index 057594d5bb..1551333839 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java @@ -222,7 +222,7 @@ public void testUpdateNodeResource() throws Exception { verify(admin).updateNodeResource(argument.capture()); UpdateNodeResourceRequest request = argument.getValue(); Map resourceMap = request.getNodeResourceMap(); - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); Resource expectedResource = Resources.createResource(memSize, cores); ResourceOption resource = resourceMap.get(nodeId); assertNotNull("resource for " + nodeIdStr + " shouldn't be null.", diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java index 4c1d152ccd..a80f9d7629 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java @@ -99,7 +99,7 @@ private static void deleteOldLogDirsFrom(Path dir, long cutoffMillis, 
if(appDir.isDirectory() && appDir.getModificationTime() < cutoffMillis) { boolean appTerminated = - isApplicationTerminated(ConverterUtils.toApplicationId(appDir + isApplicationTerminated(ApplicationId.fromString(appDir .getPath().getName()), rmClient); if(appTerminated && shouldDeleteLogDir(appDir, cutoffMillis, fs)) { try { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java index 98ffce16f3..8b213d5cc1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java @@ -209,14 +209,11 @@ public LogValue(List rootLogDirs, ContainerId containerId, public Set getPendingLogFilesToUploadForThisContainer() { Set pendingUploadFiles = new HashSet(); for (String rootLogDir : this.rootLogDirs) { - File appLogDir = - new File(rootLogDir, - ConverterUtils.toString( - this.containerId.getApplicationAttemptId(). - getApplicationId()) - ); + File appLogDir = new File(rootLogDir, + this.containerId.getApplicationAttemptId(). + getApplicationId().toString()); File containerLogDir = - new File(appLogDir, ConverterUtils.toString(this.containerId)); + new File(appLogDir, this.containerId.toString()); if (!containerLogDir.isDirectory()) { continue; // ContainerDir may have been deleted by the user. diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java index 3811054a45..26b2b01abc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java @@ -59,7 +59,7 @@ public class LogCLIHelpers implements Configurable { public int dumpAContainersLogs(String appId, String containerId, String nodeId, String jobOwner) throws IOException { ContainerLogsRequest options = new ContainerLogsRequest(); - options.setAppId(ConverterUtils.toApplicationId(appId)); + options.setAppId(ApplicationId.fromString(appId)); options.setContainerId(containerId); options.setNodeId(nodeId); options.setAppOwner(jobOwner); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java index acd29fb02d..67bc2b74fd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java @@ -18,18 +18,13 @@ package org.apache.hadoop.yarn.util; -import static org.apache.hadoop.yarn.util.StringHelper._split; - import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.Map.Entry; -import java.util.NoSuchElementException; import 
org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.SecurityUtil; @@ -41,7 +36,6 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; /** @@ -49,7 +43,7 @@ * from/to 'serializableFormat' to/from hadoop/nativejava data structures. * */ -@Private +@Public public class ConverterUtils { public static final String APPLICATION_PREFIX = "application"; @@ -58,174 +52,114 @@ public class ConverterUtils { /** * return a hadoop path from a given url + * This method is deprecated, use {@link URL#toPath()} instead. * * @param url * url to convert * @return path from {@link URL} * @throws URISyntaxException */ + @Public + @Deprecated public static Path getPathFromYarnURL(URL url) throws URISyntaxException { - String scheme = url.getScheme() == null ? "" : url.getScheme(); - - String authority = ""; - if (url.getHost() != null) { - authority = url.getHost(); - if (url.getUserInfo() != null) { - authority = url.getUserInfo() + "@" + authority; - } - if (url.getPort() > 0) { - authority += ":" + url.getPort(); - } - } - - return new Path( - (new URI(scheme, authority, url.getFile(), null, null)).normalize()); + return url.toPath(); } - - /** - * change from CharSequence to string for map key and value - * @param env map for converting - * @return string,string map + + /* + * This method is deprecated, use {@link URL#fromPath(Path)} instead. */ - public static Map convertToString( - Map env) { - - Map stringMap = new HashMap(); - for (Entry entry: env.entrySet()) { - stringMap.put(entry.getKey().toString(), entry.getValue().toString()); - } - return stringMap; - } - + @Public + @Deprecated public static URL getYarnUrlFromPath(Path path) { - return getYarnUrlFromURI(path.toUri()); + return URL.fromPath(path); } + /* + * This method is deprecated, use {@link URL#fromURI(URI)} instead. + */ + @Public + @Deprecated public static URL getYarnUrlFromURI(URI uri) { - URL url = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(URL.class); - if (uri.getHost() != null) { - url.setHost(uri.getHost()); - } - if (uri.getUserInfo() != null) { - url.setUserInfo(uri.getUserInfo()); - } - url.setPort(uri.getPort()); - url.setScheme(uri.getScheme()); - url.setFile(uri.getPath()); - return url; + return URL.fromURI(uri); } + /* + * This method is deprecated, use {@link ApplicationId#toString()} instead. + */ + @Public + @Deprecated public static String toString(ApplicationId appId) { return appId.toString(); } + /* + * This method is deprecated, use {@link ApplicationId#fromString(String)} + * instead. + */ + @Public + @Deprecated public static ApplicationId toApplicationId(RecordFactory recordFactory, - String appIdStr) { - Iterator it = _split(appIdStr).iterator(); - if (!it.next().equals(APPLICATION_PREFIX)) { - throw new IllegalArgumentException("Invalid ApplicationId prefix: " - + appIdStr + ". 
The valid ApplicationId should start with prefix " - + APPLICATION_PREFIX); - } - try { - return toApplicationId(recordFactory, it); - } catch (NumberFormatException n) { - throw new IllegalArgumentException("Invalid ApplicationId: " + appIdStr, - n); - } catch (NoSuchElementException e) { - throw new IllegalArgumentException("Invalid ApplicationId: " + appIdStr, - e); - } - } - - private static ApplicationId toApplicationId(RecordFactory recordFactory, - Iterator it) { - ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), - Integer.parseInt(it.next())); - return appId; - } - - private static ApplicationAttemptId toApplicationAttemptId( - Iterator it) throws NumberFormatException { - ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), - Integer.parseInt(it.next())); - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, Integer.parseInt(it.next())); - return appAttemptId; - } - - private static ApplicationId toApplicationId( - Iterator it) throws NumberFormatException { - ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), - Integer.parseInt(it.next())); - return appId; + String applicationIdStr) { + return ApplicationId.fromString(applicationIdStr); } + /* + * This method is deprecated, use {@link ContainerId#toString()} instead. + */ + @Public + @Deprecated public static String toString(ContainerId cId) { return cId == null ? null : cId.toString(); } - + + @Private + @InterfaceStability.Unstable public static NodeId toNodeIdWithDefaultPort(String nodeIdStr) { if (nodeIdStr.indexOf(":") < 0) { - return toNodeId(nodeIdStr + ":0"); + return NodeId.fromString(nodeIdStr + ":0"); } - return toNodeId(nodeIdStr); + return NodeId.fromString(nodeIdStr); } + /* + * This method is deprecated, use {@link NodeId#fromString(String)} instead. + */ + @Public + @Deprecated public static NodeId toNodeId(String nodeIdStr) { - String[] parts = nodeIdStr.split(":"); - if (parts.length != 2) { - throw new IllegalArgumentException("Invalid NodeId [" + nodeIdStr - + "]. Expected host:port"); - } - try { - NodeId nodeId = - NodeId.newInstance(parts[0].trim(), Integer.parseInt(parts[1])); - return nodeId; - } catch (NumberFormatException e) { - throw new IllegalArgumentException("Invalid port: " + parts[1], e); - } + return NodeId.fromString(nodeIdStr); } + /* + * This method is deprecated, use {@link ContainerId#fromString(String)} + * instead. + */ + @Public + @Deprecated public static ContainerId toContainerId(String containerIdStr) { return ContainerId.fromString(containerIdStr); } - + + /* + * This method is deprecated, use {@link ApplicationAttemptId#toString()} + * instead. + */ + @Public + @Deprecated public static ApplicationAttemptId toApplicationAttemptId( - String applicationAttmeptIdStr) { - Iterator it = _split(applicationAttmeptIdStr).iterator(); - if (!it.next().equals(APPLICATION_ATTEMPT_PREFIX)) { - throw new IllegalArgumentException("Invalid AppAttemptId prefix: " - + applicationAttmeptIdStr); - } - try { - return toApplicationAttemptId(it); - } catch (NumberFormatException n) { - throw new IllegalArgumentException("Invalid AppAttemptId: " - + applicationAttmeptIdStr, n); - } catch (NoSuchElementException e) { - throw new IllegalArgumentException("Invalid AppAttemptId: " - + applicationAttmeptIdStr, e); - } + String applicationAttemptIdStr) { + return ApplicationAttemptId.fromString(applicationAttemptIdStr); } + /* + * This method is deprecated, use {@link ApplicationId#fromString(String)} + * instead. 
+ */ + @Public + @Deprecated public static ApplicationId toApplicationId( String appIdStr) { - Iterator it = _split(appIdStr).iterator(); - if (!it.next().equals(APPLICATION_PREFIX)) { - throw new IllegalArgumentException("Invalid ApplicationId prefix: " - + appIdStr + ". The valid ApplicationId should start with prefix " - + APPLICATION_PREFIX); - } - try { - return toApplicationId(it); - } catch (NumberFormatException n) { - throw new IllegalArgumentException("Invalid ApplicationId: " - + appIdStr, n); - } catch (NoSuchElementException e) { - throw new IllegalArgumentException("Invalid ApplicationId: " - + appIdStr, e); - } + return ApplicationId.fromString(appIdStr); } /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java index bd9c907418..de18dc63d5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java @@ -346,7 +346,7 @@ private long unpack(File localrsrc, File dst) throws IOException { public Path call() throws Exception { final Path sCopy; try { - sCopy = ConverterUtils.getPathFromYarnURL(resource.getResource()); + sCopy = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException("Invalid resource", e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java index 2fc8dfcc9c..1da6e232ea 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java @@ -290,7 +290,7 @@ private ContainerId verifyAndGetContainerId(Block html) { } ContainerId containerId = null; try { - containerId = ConverterUtils.toContainerId(containerIdStr); + containerId = ContainerId.fromString(containerIdStr); } catch (IllegalArgumentException e) { html.h1() ._("Cannot get container logs for invalid containerId: " @@ -308,7 +308,7 @@ private NodeId verifyAndGetNodeId(Block html) { } NodeId nodeId = null; try { - nodeId = ConverterUtils.toNodeId(nodeIdStr); + nodeId = NodeId.fromString(nodeIdStr); } catch (IllegalArgumentException e) { html.h1()._("Cannot get container logs. 
Invalid nodeId: " + nodeIdStr) ._(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java index 3aa773aea6..624554173c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java @@ -392,7 +392,7 @@ public static ApplicationId parseApplicationId(RecordFactory recordFactory, } ApplicationId aid = null; try { - aid = ConverterUtils.toApplicationId(recordFactory, appId); + aid = ApplicationId.fromString(appId); } catch (Exception e) { throw new BadRequestException(e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java index 3cec38b060..077558b96a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java @@ -34,55 +34,56 @@ public class TestConverterUtils { @Test public void testConvertUrlWithNoPort() throws URISyntaxException { Path expectedPath = new Path("hdfs://foo.com"); - URL url = ConverterUtils.getYarnUrlFromPath(expectedPath); - Path actualPath = ConverterUtils.getPathFromYarnURL(url); + URL url = URL.fromPath(expectedPath); + Path actualPath = url.toPath(); assertEquals(expectedPath, actualPath); } @Test public void testConvertUrlWithUserinfo() throws URISyntaxException { Path expectedPath = new Path("foo://username:password@example.com:8042"); - URL url = ConverterUtils.getYarnUrlFromPath(expectedPath); - Path actualPath = ConverterUtils.getPathFromYarnURL(url); + URL url = URL.fromPath(expectedPath); + Path actualPath = url.toPath(); assertEquals(expectedPath, actualPath); } @Test public void testContainerId() throws URISyntaxException { ContainerId id = TestContainerId.newContainerId(0, 0, 0, 0); - String cid = ConverterUtils.toString(id); + String cid = id.toString(); assertEquals("container_0_0000_00_000000", cid); - ContainerId gen = ConverterUtils.toContainerId(cid); + ContainerId gen = ContainerId.fromString(cid); assertEquals(gen, id); } @Test public void testContainerIdWithEpoch() throws URISyntaxException { ContainerId id = TestContainerId.newContainerId(0, 0, 0, 25645811); - String cid = ConverterUtils.toString(id); + String cid = id.toString(); assertEquals("container_0_0000_00_25645811", cid); - ContainerId gen = ConverterUtils.toContainerId(cid); + ContainerId gen = ContainerId.fromString(cid); assertEquals(gen.toString(), id.toString()); long ts = System.currentTimeMillis(); ContainerId id2 = TestContainerId.newContainerId(36473, 4365472, ts, 4298334883325L); - String cid2 = ConverterUtils.toString(id2); + String cid2 = id2.toString(); assertEquals( "container_e03_" + ts + "_36473_4365472_999799999997", cid2); - ContainerId gen2 = ConverterUtils.toContainerId(cid2); + ContainerId gen2 = ContainerId.fromString(cid2); assertEquals(gen2.toString(), id2.toString()); ContainerId id3 = TestContainerId.newContainerId(36473, 4365472, ts, 844424930131965L); - String cid3 = ConverterUtils.toString(id3); + 
String cid3 = id3.toString(); assertEquals( "container_e767_" + ts + "_36473_4365472_1099511627773", cid3); - ContainerId gen3 = ConverterUtils.toContainerId(cid3); + ContainerId gen3 = ContainerId.fromString(cid3); assertEquals(gen3.toString(), id3.toString()); } @Test + @SuppressWarnings("deprecation") public void testContainerIdNull() throws URISyntaxException { assertNull(ConverterUtils.toString((ContainerId)null)); } @@ -101,16 +102,19 @@ public void testNodeIdWithDefaultPort() throws URISyntaxException { } @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("deprecation") public void testInvalidContainerId() { - ConverterUtils.toContainerId("container_e20_1423221031460_0003_01"); + ContainerId.fromString("container_e20_1423221031460_0003_01"); } @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("deprecation") public void testInvalidAppattemptId() { ConverterUtils.toApplicationAttemptId("appattempt_1423221031460"); } @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("deprecation") public void testApplicationId() { ConverterUtils.toApplicationId("application_1423221031460"); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java index 376b27d4bf..877dd080af 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java @@ -53,6 +53,7 @@ import java.util.zip.ZipOutputStream; import org.apache.hadoop.util.concurrent.HadoopExecutors; +import org.apache.hadoop.yarn.api.records.URL; import org.junit.Assert; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; @@ -103,7 +104,7 @@ static LocalResource createFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis) throws IOException { createFile(files, p, len, r); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(p)); + ret.setResource(URL.fromPath(p)); ret.setSize(len); ret.setType(LocalResourceType.FILE); ret.setVisibility(vis); @@ -134,7 +135,7 @@ static LocalResource createJar(FileContext files, Path p, LOG.info("Done writing jar stream "); out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(p)); + ret.setResource(URL.fromPath(p)); FileStatus status = files.getFileStatus(p); ret.setSize(status.getLen()); ret.setTimestamp(status.getModificationTime()); @@ -162,7 +163,7 @@ static LocalResource createTarFile(FileContext files, Path p, int len, out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".tar"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); @@ -190,7 +191,7 @@ static LocalResource createTgzFile(FileContext files, Path p, int len, out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".tar.gz"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); @@ -216,7 +217,7 @@ static 
LocalResource createJarFile(FileContext files, Path p, int len, out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".jar"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); @@ -242,7 +243,7 @@ static LocalResource createZipFile(FileContext files, Path p, int len, out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".ZIP"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java index aedf6f656b..84d4543944 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java @@ -278,7 +278,7 @@ private static ApplicationReportExt convertToApplicationReport( } if (field == ApplicationReportField.USER_AND_ACLS) { return new ApplicationReportExt(ApplicationReport.newInstance( - ConverterUtils.toApplicationId(entity.getEntityId()), + ApplicationId.fromString(entity.getEntityId()), latestApplicationAttemptId, user, queue, name, null, -1, null, state, diagnosticsInfo, null, createdTime, finishedTime, finalStatus, null, null, progress, type, null, appTags, @@ -394,13 +394,10 @@ private static ApplicationReportExt convertToApplicationReport( } if (eventInfo .containsKey(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO)) { - latestApplicationAttemptId = - ConverterUtils - .toApplicationAttemptId( - eventInfo - .get( - ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO) - .toString()); + latestApplicationAttemptId = ApplicationAttemptId.fromString( + eventInfo.get( + ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO) + .toString()); } if (eventInfo .containsKey(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) { @@ -426,7 +423,7 @@ private static ApplicationReportExt convertToApplicationReport( } } return new ApplicationReportExt(ApplicationReport.newInstance( - ConverterUtils.toApplicationId(entity.getEntityId()), + ApplicationId.fromString(entity.getEntityId()), latestApplicationAttemptId, user, queue, name, null, -1, null, state, diagnosticsInfo, null, createdTime, finishedTime, finalStatus, appResources, null, progress, type, null, appTags, unmanagedApplication, @@ -471,7 +468,7 @@ private static ApplicationAttemptReport convertToApplicationAttemptReport( if (eventInfo .containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO)) { amContainerId = - ConverterUtils.toContainerId(eventInfo.get( + ContainerId.fromString(eventInfo.get( AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO) .toString()); } @@ -513,7 +510,7 @@ 
private static ApplicationAttemptReport convertToApplicationAttemptReport( if (eventInfo .containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO)) { amContainerId = - ConverterUtils.toContainerId(eventInfo.get( + ContainerId.fromString(eventInfo.get( AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO) .toString()); } @@ -521,7 +518,7 @@ private static ApplicationAttemptReport convertToApplicationAttemptReport( } } return ApplicationAttemptReport.newInstance( - ConverterUtils.toApplicationAttemptId(entity.getEntityId()), + ApplicationAttemptId.fromString(entity.getEntityId()), host, rpcPort, trackingUrl, originalTrackingUrl, diagnosticsInfo, state, amContainerId); } @@ -610,7 +607,7 @@ private static ContainerReport convertToContainerReport( } } ContainerId containerId = - ConverterUtils.toContainerId(entity.getEntityId()); + ContainerId.fromString(entity.getEntityId()); String logUrl = null; NodeId allocatedNode = null; if (allocatedHost != null) { @@ -623,7 +620,7 @@ private static ContainerReport convertToContainerReport( user); } return ContainerReport.newInstance( - ConverterUtils.toContainerId(entity.getEntityId()), + ContainerId.fromString(entity.getEntityId()), Resource.newInstance(allocatedMem, allocatedVcore), allocatedNode, Priority.newInstance(allocatedPriority), createdTime, finishedTime, diagnosticsInfo, logUrl, exitStatus, state, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java index c340b19025..295b8ab635 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java @@ -204,7 +204,7 @@ public Map getAllApplications() FileStatus[] files = fs.listStatus(rootDirPath); for (FileStatus file : files) { ApplicationId appId = - ConverterUtils.toApplicationId(file.getPath().getName()); + ApplicationId.fromString(file.getPath().getName()); try { ApplicationHistoryData historyData = getApplication(appId); if (historyData != null) { @@ -231,8 +231,8 @@ public Map getAllApplications() HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.startsWith( ConverterUtils.APPLICATION_ATTEMPT_PREFIX)) { - ApplicationAttemptId appAttemptId = - ConverterUtils.toApplicationAttemptId(entry.key.id); + ApplicationAttemptId appAttemptId = ApplicationAttemptId.fromString( + entry.key.id); if (appAttemptId.getApplicationId().equals(appId)) { ApplicationAttemptHistoryData historyData = historyDataMap.get(appAttemptId); @@ -385,7 +385,7 @@ public Map getContainers( HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.startsWith(ConverterUtils.CONTAINER_PREFIX)) { ContainerId containerId = - ConverterUtils.toContainerId(entry.key.id); + ContainerId.fromString(entry.key.id); if (containerId.getApplicationAttemptId().equals(appAttemptId)) { ContainerHistoryData historyData = historyDataMap.get(containerId); diff --git 
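The history-store changes above keep persisting entries under the canonical string form of each ID and parse them back with the new fromString factories, still filtering on the ConverterUtils prefix constants. A small sketch of that round trip (the timestamp and sequence numbers are arbitrary sample values):

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.ConverterUtils;

public class HistoryKeySketch {
  public static void main(String[] args) {
    ApplicationId appId = ApplicationId.newInstance(1423221031460L, 3);
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    ContainerId containerId = ContainerId.newContainerId(attemptId, 2);

    // Entries are keyed by the canonical toString() form.
    String key = containerId.toString();

    // Keys are still filtered on the prefix constants, but parsing now goes
    // through ContainerId.fromString rather than ConverterUtils.toContainerId.
    if (key.startsWith(ConverterUtils.CONTAINER_PREFIX)) {
      ContainerId parsed = ContainerId.fromString(key);
      System.out.println(parsed.getApplicationAttemptId().equals(attemptId));
    }
  }
}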
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java index 22e45fa3d3..134f22de33 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java @@ -113,7 +113,7 @@ public static LocalResource newLocalResource(URL url, LocalResourceType type, public static LocalResource newLocalResource(URI uri, LocalResourceType type, LocalResourceVisibility visibility, long size, long timestamp, boolean shouldBeUploadedToSharedCache) { - return newLocalResource(ConverterUtils.getYarnUrlFromURI(uri), type, + return newLocalResource(URL.fromURI(uri), type, visibility, size, timestamp, shouldBeUploadedToSharedCache); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java index 9c2a1ae04d..798c372673 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java @@ -65,7 +65,7 @@ protected void render(Block html) { } try { - appAttemptId = ConverterUtils.toApplicationAttemptId(attemptid); + appAttemptId = ApplicationAttemptId.fromString(attemptid); } catch (IllegalArgumentException e) { puts("Invalid application attempt ID: " + attemptid); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java index cae8d2e6fb..893e82384f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java @@ -59,7 +59,7 @@ protected void render(Block html) { ContainerId containerId = null; try { - containerId = ConverterUtils.toContainerId(containerid); + containerId = ContainerId.fromString(containerid); } catch (IllegalArgumentException e) { puts("Invalid container ID: " + containerid); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java index 19ea30136e..904c5118f4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java @@ -431,7 +431,7 @@ protected static ApplicationId parseApplicationId(String appId) { } ApplicationId aid = null; try { - aid = ConverterUtils.toApplicationId(appId); + aid = ApplicationId.fromString(appId); } catch (Exception e) { throw new BadRequestException(e); } @@ -449,7 +449,7 @@ protected static ApplicationAttemptId parseApplicationAttemptId( } ApplicationAttemptId aaid = null; try { - aaid = ConverterUtils.toApplicationAttemptId(appAttemptId); + aaid = ApplicationAttemptId.fromString(appAttemptId); } catch (Exception e) { throw new BadRequestException(e); } @@ -466,7 +466,7 @@ protected static ContainerId parseContainerId(String containerId) { } ContainerId cid = null; try { - cid = ConverterUtils.toContainerId(containerId); + cid = ContainerId.fromString(containerId); } catch (Exception e) { throw new BadRequestException(e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java index 8f4b122110..f8f19bdadc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java @@ -167,11 +167,10 @@ public int launchContainer(ContainerStartContext ctx) throws IOException { ContainerId containerId = container.getContainerId(); // create container dirs on all disks - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); String appIdStr = - ConverterUtils.toString( containerId.getApplicationAttemptId(). 
- getApplicationId()); + getApplicationId().toString(); for (String sLocalDir : localDirs) { Path usersdir = new Path(sLocalDir, ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, user); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java index 72da2365f5..2b18469436 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java @@ -195,9 +195,9 @@ public int launchContainer(ContainerStartContext ctx) throws IOException { ContainerId containerId = container.getContainerId(); // create container dirs on all disks - String containerIdStr = ConverterUtils.toString(containerId); - String appIdStr = ConverterUtils.toString( - containerId.getApplicationAttemptId().getApplicationId()); + String containerIdStr = containerId.toString(); + String appIdStr = + containerId.getApplicationAttemptId().getApplicationId().toString(); for (String sLocalDir : localDirs) { Path usersdir = new Path(sLocalDir, ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, userName); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java index e46ce569ee..1072b5a415 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java @@ -319,7 +319,7 @@ public int launchContainer(ContainerStartContext ctx) throws IOException { String runAsUser = getRunAsUser(user); ContainerId containerId = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); resourcesHandler.preExecute(containerId, container.getResource()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java index 304488eab9..7a6e1cfa20 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java @@ -1361,7 +1361,7 @@ public void handle(ContainerEvent event) { public String 
toString() { this.readLock.lock(); try { - return ConverterUtils.toString(this.containerId); + return this.containerId.toString(); } finally { this.readLock.unlock(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java index a3b53e35e4..7e9030cce2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java @@ -163,7 +163,7 @@ public Integer call() { final ContainerLaunchContext launchContext = container.getLaunchContext(); ContainerId containerID = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerID); + String containerIdStr = containerID.toString(); final List command = launchContext.getCommands(); int ret = -1; @@ -326,7 +326,7 @@ protected boolean validateContainerState() { protected List getContainerLogDirs(List logDirs) { List containerLogDirs = new ArrayList<>(logDirs.size()); String appIdStr = app.getAppId().toString(); - String containerIdStr = ConverterUtils.toString(container.getContainerId()); + String containerIdStr = container.getContainerId().toString(); String relativeContainerLogDir = ContainerLaunch .getRelativeContainerLogDir(appIdStr, containerIdStr); @@ -520,7 +520,7 @@ protected String getPidFileSubpath(String appIdStr, String containerIdStr) { @SuppressWarnings("unchecked") // dispatcher not typed public void cleanupContainer() throws IOException { ContainerId containerId = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); LOG.info("Cleaning up container " + containerIdStr); try { @@ -616,7 +616,7 @@ public void signalContainer(SignalContainerCommand command) throws IOException { ContainerId containerId = container.getContainerTokenIdentifier().getContainerID(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); String user = container.getUser(); Signal signal = translateCommandToSignal(command); if (signal.equals(Signal.NULL)) { @@ -708,7 +708,7 @@ public static Signal translateCommandToSignal( */ private String getContainerPid(Path pidFilePath) throws Exception { String containerIdStr = - ConverterUtils.toString(container.getContainerId()); + container.getContainerId().toString(); String processId = null; LOG.debug("Accessing pid for container " + containerIdStr + " from pid file " + pidFilePath); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerRelaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerRelaunch.java index 711d5cdc26..1292df6d31 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerRelaunch.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerRelaunch.java @@ -65,7 +65,7 @@ public Integer call() { } ContainerId containerId = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); int ret = -1; Path containerLogDir; try { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java index b9bdcc6c0d..3cd31b703d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java @@ -68,9 +68,9 @@ public RecoveredContainerLaunch(Context context, Configuration configuration, public Integer call() { int retCode = ExitCode.LOST.getExitCode(); ContainerId containerId = container.getContainerId(); - String appIdStr = ConverterUtils.toString( - containerId.getApplicationAttemptId().getApplicationId()); - String containerIdStr = ConverterUtils.toString(containerId); + String appIdStr = + containerId.getApplicationAttemptId().getApplicationId().toString(); + String containerIdStr = containerId.toString(); dispatcher.getEventHandler().handle(new ContainerEvent(containerId, ContainerEventType.CONTAINER_LAUNCHED)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java index 57cc346d89..65fd9d8ece 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java @@ -56,6 +56,7 @@ import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.SerializedException; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; @@ -295,7 +296,7 @@ private LocalizerStatus createStatus() throws InterruptedException { try { Path localPath = fPath.get(); stat.setLocalPath( - ConverterUtils.getYarnUrlFromPath(localPath)); + 
URL.fromPath(localPath)); stat.setLocalSize( FileUtil.getDU(new File(localPath.getParent().toUri()))); stat.setStatus(ResourceStatusType.FETCH_SUCCESS); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java index 607d0b4086..d2e8e22d45 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java @@ -43,7 +43,7 @@ public class LocalResourceRequest */ public LocalResourceRequest(LocalResource resource) throws URISyntaxException { - this(ConverterUtils.getPathFromYarnURL(resource.getResource()), + this(resource.getResource().toPath(), resource.getTimestamp(), resource.getType(), resource.getVisibility(), @@ -133,7 +133,7 @@ public LocalResourceType getType() { @Override public URL getResource() { - return ConverterUtils.getYarnUrlFromPath(loc); + return URL.fromPath(loc); } @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java index b2413add0a..409cc29f31 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java @@ -79,6 +79,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; @@ -301,7 +302,7 @@ public void recoverLocalizedResources(RecoveredLocalizationState state) trackerState = appEntry.getValue(); if (!trackerState.isEmpty()) { ApplicationId appId = appEntry.getKey(); - String appIdStr = ConverterUtils.toString(appId); + String appIdStr = appId.toString(); LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user, appId, dispatcher, false, super.getConfig(), stateStore); LocalResourcesTracker oldTracker = appRsrc.putIfAbsent(appIdStr, @@ -442,7 +443,7 @@ private void handleInitApplicationResources(Application app) { String userName = app.getUser(); privateRsrc.putIfAbsent(userName, new LocalResourcesTrackerImpl(userName, null, dispatcher, true, super.getConfig(), stateStore)); - String appIdStr = ConverterUtils.toString(app.getAppId()); + String appIdStr 
= app.getAppId().toString(); appRsrc.putIfAbsent(appIdStr, new LocalResourcesTrackerImpl(app.getUser(), app.getAppId(), dispatcher, false, super.getConfig(), stateStore)); // 1) Signal container init @@ -491,7 +492,7 @@ private void handleInitContainerResources( private void handleContainerResourcesLocalized( ContainerLocalizationEvent event) { Container c = event.getContainer(); - String locId = ConverterUtils.toString(c.getContainerId()); + String locId = c.getContainerId().toString(); localizerTracker.endContainerLocalization(locId); } @@ -528,14 +529,15 @@ private void handleCleanupContainerResources( c.getContainerId())); } } - String locId = ConverterUtils.toString(c.getContainerId()); + String locId = c.getContainerId().toString(); localizerTracker.cleanupPrivLocalizers(locId); // Delete the container directories String userName = c.getUser(); String containerIDStr = c.toString(); - String appIDStr = ConverterUtils.toString( - c.getContainerId().getApplicationAttemptId().getApplicationId()); + String appIDStr = + c.getContainerId().getApplicationAttemptId().getApplicationId() + .toString(); // Try deleting from good local dirs and full local dirs because a dir might // have gone bad while the app was running(disk full). In addition @@ -583,7 +585,7 @@ private void handleDestroyApplicationResources(Application application) { ApplicationId appId = application.getAppId(); String appIDStr = application.toString(); LocalResourcesTracker appLocalRsrcsTracker = - appRsrc.remove(ConverterUtils.toString(appId)); + appRsrc.remove(appId.toString()); if (appLocalRsrcsTracker != null) { for (LocalizedResource rsrc : appLocalRsrcsTracker ) { Path localPath = rsrc.getLocalPath(); @@ -637,7 +639,7 @@ LocalResourcesTracker getLocalResourcesTracker( case PRIVATE: return privateRsrc.get(user); case APPLICATION: - return appRsrc.get(ConverterUtils.toString(appId)); + return appRsrc.get(appId.toString()); } } @@ -977,7 +979,7 @@ private LocalResource findNextResource() { LocalResourceRequest nextRsrc = nRsrc.getRequest(); LocalResource next = recordFactory.newRecordInstance(LocalResource.class); - next.setResource(ConverterUtils.getYarnUrlFromPath(nextRsrc + next.setResource(URL.fromPath(nextRsrc .getPath())); next.setTimestamp(nextRsrc.getTimestamp()); next.setType(nextRsrc.getType()); @@ -1028,8 +1030,8 @@ LocalizerHeartbeatResponse processHeartbeat( try { getLocalResourcesTracker(req.getVisibility(), user, applicationId) .handle( - new ResourceLocalizedEvent(req, ConverterUtils - .getPathFromYarnURL(stat.getLocalPath()), stat.getLocalSize())); + new ResourceLocalizedEvent(req, stat.getLocalPath().toPath(), + stat.getLocalSize())); } catch (URISyntaxException e) { } // unlocking the resource and removing it from scheduled resource @@ -1142,8 +1144,8 @@ public void run() { .setNmPrivateContainerTokens(nmPrivateCTokensPath) .setNmAddr(localizationServerAddress) .setUser(context.getUser()) - .setAppId(ConverterUtils.toString(context.getContainerId() - .getApplicationAttemptId().getApplicationId())) + .setAppId(context.getContainerId() + .getApplicationAttemptId().getApplicationId().toString()) .setLocId(localizerId) .setDirsHandler(dirsHandler) .build()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java 
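The localization changes above swap ConverterUtils.getYarnUrlFromPath / getPathFromYarnURL for URL.fromPath and URL#toPath. A minimal sketch of the round trip when building and consuming a LocalResource (the HDFS path, size and timestamp are placeholder values, not taken from the patch):

import java.net.URISyntaxException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.URL;

public class UrlPathSketch {
  public static void main(String[] args) throws URISyntaxException {
    Path remote = new Path("hdfs://nn.example.com:8020/apps/lib/job.jar");

    // URL.fromPath takes over from ConverterUtils.getYarnUrlFromPath when a
    // LocalResource is built for the localizer.
    LocalResource resource = LocalResource.newInstance(
        URL.fromPath(remote), LocalResourceType.FILE,
        LocalResourceVisibility.APPLICATION, 1024L, 0L);

    // URL#toPath takes over from ConverterUtils.getPathFromYarnURL on the
    // consuming side (FSDownload, SharedCacheUploader, LocalResourceRequest).
    Path roundTripped = resource.getResource().toPath();
    System.out.println(remote.equals(roundTripped));
  }
}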
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java index 2e05dd7abd..0e732a7ce5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java @@ -37,7 +37,7 @@ public class LocalizerResourceRequestEvent extends LocalizerEvent { public LocalizerResourceRequestEvent(LocalizedResource resource, LocalResourceVisibility vis, LocalizerContext context, String pattern) { super(LocalizerEventType.REQUEST_RESOURCE_LOCALIZATION, - ConverterUtils.toString(context.getContainerId())); + context.getContainerId().toString()); this.vis = vis; this.context = context; this.resource = resource; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java index 682b2726d1..b034e7a209 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java @@ -211,7 +211,7 @@ boolean verifyAccess() throws IOException { final Path remotePath; try { - remotePath = ConverterUtils.getPathFromYarnURL(resource.getResource()); + remotePath = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException("Invalid resource", e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java index ba7836a040..c70fa5b4c7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java @@ -164,7 +164,7 @@ public AppLogAggregatorImpl(Dispatcher dispatcher, this.conf = conf; this.delService = deletionService; this.appId = appId; - this.applicationId = ConverterUtils.toString(appId); + this.applicationId = appId.toString(); this.userUgi = userUgi; this.dirsHandler = dirsHandler; this.remoteNodeLogFileForApp = remoteNodeLogFileForApp; diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java index 8bd20402b4..5fe2713495 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java @@ -186,7 +186,7 @@ public List loadContainersState() if (idEndPos < 0) { throw new IOException("Unable to determine container in key: " + key); } - ContainerId containerId = ConverterUtils.toContainerId( + ContainerId containerId = ContainerId.fromString( key.substring(CONTAINERS_KEY_PREFIX.length(), idEndPos)); String keyPrefix = key.substring(0, idEndPos+1); RecoveredContainerState rcs = loadContainerState(containerId, @@ -654,7 +654,7 @@ private RecoveredUserResources loadUserLocalizedResources( throw new IOException("Unable to determine appID in resource key: " + key); } - ApplicationId appId = ConverterUtils.toApplicationId( + ApplicationId appId = ApplicationId.fromString( key.substring(appIdStartPos, appIdEndPos)); userResources.appTrackerStates.put(appId, loadResourceTrackerState(iter, key.substring(0, appIdEndPos+1))); @@ -822,7 +822,7 @@ public RecoveredNMTokensState loadNMTokensState() throws IOException { ApplicationAttemptId.appAttemptIdStrPrefix)) { ApplicationAttemptId attempt; try { - attempt = ConverterUtils.toApplicationAttemptId(key); + attempt = ApplicationAttemptId.fromString(key); } catch (IllegalArgumentException e) { throw new IOException("Bad application master key state for " + fullKey, e); @@ -926,7 +926,7 @@ private static void loadContainerToken(RecoveredContainerTokensState state, ContainerId containerId; Long expTime; try { - containerId = ConverterUtils.toContainerId(containerIdStr); + containerId = ContainerId.fromString(containerIdStr); expTime = Long.parseLong(asString(value)); } catch (IllegalArgumentException e) { throw new IOException("Bad container token state for " + key, e); @@ -988,7 +988,7 @@ public RecoveredLogDeleterState loadLogDeleterState() throws IOException { String appIdStr = fullKey.substring(logDeleterKeyPrefixLength); ApplicationId appId = null; try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (IllegalArgumentException e) { LOG.warn("Skipping unknown log deleter key " + fullKey); continue; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java index 21cf1f27c2..21c3c06404 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java @@ -28,7 +28,7 @@ public class NodeManagerBuilderUtils { public static ResourceLocalizationSpec newResourceLocalizationSpec( LocalResource rsrc, Path path) { - URL local = ConverterUtils.getYarnUrlFromPath(path); + URL local = URL.fromPath(path); ResourceLocalizationSpec resourceLocalizationSpec = Records.newRecord(ResourceLocalizationSpec.class); resourceLocalizationSpec.setDestinationDirectory(local); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java index 80d4db2498..5a7dba7ebc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java @@ -27,6 +27,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.Shell; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.util.ConverterUtils; /** @@ -69,7 +70,7 @@ public static String getProcessId(Path path) throws IOException { // On Windows, pid is expected to be a container ID, so find first // line that parses successfully as a container ID. try { - ConverterUtils.toContainerId(temp); + ContainerId.fromString(temp); processId = temp; break; } catch (Exception e) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java index bc90d8e40f..2783b18699 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java @@ -76,10 +76,9 @@ public ApplicationBlock(Context nmContext, Configuration conf) { @Override protected void render(Block html) { - ApplicationId applicationID = null; + ApplicationId applicationID; try { - applicationID = ConverterUtils.toApplicationId(this.recordFactory, - $(APPLICATION_ID)); + applicationID = ApplicationId.fromString($(APPLICATION_ID)); } catch (IllegalArgumentException e) { html.p()._("Invalid Application Id " + $(APPLICATION_ID))._(); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java index 2fd6b2cdf1..3e5f4d2e49 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java @@ -92,7 +92,7 @@ protected void render(Block html) { ContainerId containerId; try { - containerId = ConverterUtils.toContainerId($(CONTAINER_ID)); + containerId = ContainerId.fromString($(CONTAINER_ID)); } catch (IllegalArgumentException ex) { html.h1("Invalid container ID: " + $(CONTAINER_ID)); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java index 319f49be5a..35e75939f3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java @@ -78,8 +78,8 @@ static List getContainerLogDirs(ContainerId containerId, List containerLogDirs = new ArrayList(logDirs.size()); for (String logDir : logDirs) { logDir = new File(logDir).toURI().getPath(); - String appIdStr = ConverterUtils.toString(containerId - .getApplicationAttemptId().getApplicationId()); + String appIdStr = containerId + .getApplicationAttemptId().getApplicationId().toString(); File appLogDir = new File(logDir, appIdStr); containerLogDirs.add(new File(appLogDir, containerId.toString())); } @@ -160,7 +160,7 @@ private static void checkState(ContainerState state) { public static FileInputStream openLogFileForRead(String containerIdStr, File logFile, Context context) throws IOException { - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); ApplicationId applicationId = containerId.getApplicationAttemptId() .getApplicationId(); String user = context.getApplications().get( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java index f4367bcb89..a1e0bc7710 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java @@ -63,7 +63,7 @@ public ContainerBlock(Context nmContext) { protected void render(Block html) { ContainerId containerID; try { - containerID = ConverterUtils.toContainerId($(CONTAINER_ID)); + containerID = ContainerId.fromString($(CONTAINER_ID)); } catch (IllegalArgumentException e) { html.p()._("Invalid containerId " + $(CONTAINER_ID))._(); return; diff --git 
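The webapp hunks above (ContainerLogsPage, ContainerLogsUtils, ContainerPage) keep their existing error handling because ContainerId.fromString(), like the old ConverterUtils.toContainerId(), rejects malformed input with IllegalArgumentException. A small self-contained sketch of that parse-and-validate shape follows; the method name and sample strings are illustrative, not taken from the patch.

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;

public class ContainerIdParsingSketch {
  /** Returns the owning application id, or null when the string is not a container id. */
  static ApplicationId owningApplication(String containerIdStr) {
    try {
      ContainerId containerId = ContainerId.fromString(containerIdStr);
      return containerId.getApplicationAttemptId().getApplicationId();
    } catch (IllegalArgumentException e) {
      return null; // the webapp pages render an "Invalid container ID" message at this point
    }
  }

  public static void main(String[] args) {
    System.out.println(owningApplication("container_1450901921_0001_01_000001"));
    System.out.println(owningApplication("not-a-container-id"));
  }
}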
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java index efc0e7e6d9..3a30392ee5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java @@ -181,7 +181,7 @@ public ContainerInfo getNodeContainer(@javax.ws.rs.core.Context ContainerId containerId = null; init(); try { - containerId = ConverterUtils.toContainerId(id); + containerId = ContainerId.fromString(id); } catch (Exception e) { throw new BadRequestException("invalid container id, " + id); } @@ -224,7 +224,7 @@ public Response getLogs(@PathParam("containerid") String containerIdStr, @QueryParam("size") String size) { ContainerId containerId; try { - containerId = ConverterUtils.toContainerId(containerIdStr); + containerId = ContainerId.fromString(containerIdStr); } catch (IllegalArgumentException ex) { return Response.status(Status.BAD_REQUEST).build(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java index 95e2a6537b..f55ca810d8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java @@ -42,14 +42,14 @@ public AppInfo() { } // JAXB needs this public AppInfo(final Application app) { - this.id = ConverterUtils.toString(app.getAppId()); + this.id = app.getAppId().toString(); this.state = app.getApplicationState().toString(); this.user = app.getUser(); this.containerids = new ArrayList(); Map appContainers = app.getContainers(); for (ContainerId containerId : appContainers.keySet()) { - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); containerids.add(containerIdStr); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java index 9fb8ebf43e..5f9b8830a1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java @@ -118,7 +118,7 @@ public void testClearLocalDirWhenNodeReboot() throws IOException, ContainerId cId = createContainerId(); URL localResourceUri = - 
ConverterUtils.getYarnUrlFromPath(localFS.makeQualified(new Path( + URL.fromPath(localFS.makeQualified(new Path( localResourceDir.getAbsolutePath()))); LocalResource localResource = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java index b3d44f526e..ee2677ce5d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java @@ -741,7 +741,7 @@ public void startContainer() ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java index 980c76440a..b3ad31821d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java @@ -200,7 +200,7 @@ public static void startContainer(NodeManager nm, ContainerId cId, .getCanonicalHostName(), port); URL localResourceUri = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource localResource = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java index 03937ea8b9..6ceaa750f3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java @@ -25,6 +25,7 @@ import java.net.URISyntaxException; import java.util.ArrayList; +import org.apache.hadoop.yarn.api.records.URL; import org.junit.Assert; import org.apache.hadoop.conf.Configuration; @@ -62,7 +63,7 @@ static RecordFactory createPBRecordFactory() { static LocalResource createResource() { LocalResource ret = 
recordFactory.newRecordInstance(LocalResource.class); assertTrue(ret instanceof LocalResourcePBImpl); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path( + ret.setResource(URL.fromPath(new Path( "hdfs://y.ak:9820/foo/bar"))); ret.setSize(4344L); ret.setTimestamp(3141592653589793L); @@ -76,7 +77,7 @@ static LocalResourceStatus createLocalResourceStatus() { assertTrue(ret instanceof LocalResourceStatusPBImpl); ret.setResource(createResource()); ret.setLocalPath( - ConverterUtils.getYarnUrlFromPath( + URL.fromPath( new Path("file:///local/foo/bar"))); ret.setStatus(ResourceStatusType.FETCH_SUCCESS); ret.setLocalSize(4443L); @@ -109,8 +110,8 @@ static LocalizerHeartbeatResponse createLocalizerHeartbeatResponse() ResourceLocalizationSpec resource = recordFactory.newRecordInstance(ResourceLocalizationSpec.class); resource.setResource(rsrc); - resource.setDestinationDirectory(ConverterUtils - .getYarnUrlFromPath(new Path("/tmp" + System.currentTimeMillis()))); + resource.setDestinationDirectory( + URL.fromPath((new Path("/tmp" + System.currentTimeMillis())))); rsrcs.add(resource); ret.setResourceSpecs(rsrcs); System.out.println(resource); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java index 10b9155dd6..1f803b4e67 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java @@ -199,7 +199,7 @@ public void testContainerSetup() throws Exception { ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(file.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); rsrc_alpha.setResource(resource_alpha); @@ -229,8 +229,8 @@ public void testContainerSetup() throws Exception { // Now ascertain that the resources are localised correctly. 
ApplicationId appId = cId.getApplicationAttemptId().getApplicationId(); - String appIDStr = ConverterUtils.toString(appId); - String containerIDStr = ConverterUtils.toString(cId); + String appIDStr = appId.toString(); + String containerIDStr = cId.toString(); File userCacheDir = new File(localDir, ContainerLocalizer.USERCACHE); File userDir = new File(userCacheDir, user); File appCache = new File(userDir, ContainerLocalizer.APPCACHE); @@ -288,7 +288,7 @@ public void testContainerLaunchAndStop() throws IOException, recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -395,7 +395,7 @@ protected void testContainerLaunchAndExit(int exitCode) throws IOException, recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -488,7 +488,7 @@ public void testLocalFilesCleanup() throws InterruptedException, // containerLaunchContext.resources = // new HashMap(); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(FileContext.getLocalFSFileContext() + URL.fromPath(FileContext.getLocalFSFileContext() .makeQualified(new Path(file.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); rsrc_alpha.setResource(resource_alpha); @@ -521,8 +521,8 @@ public void testLocalFilesCleanup() throws InterruptedException, ApplicationState.RUNNING); // Now ascertain that the resources are localised correctly. 
- String appIDStr = ConverterUtils.toString(appId); - String containerIDStr = ConverterUtils.toString(cId); + String appIDStr = appId.toString(); + String containerIDStr = cId.toString(); File userCacheDir = new File(localDir, ContainerLocalizer.USERCACHE); File userDir = new File(userCacheDir, user); File appCache = new File(userDir, ContainerLocalizer.APPCACHE); @@ -975,7 +975,7 @@ public void testIncreaseContainerResourceWithInvalidResource() throws Exception ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1059,7 +1059,7 @@ public void testChangeContainerResource() throws Exception { ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1181,7 +1181,7 @@ private void testContainerLaunchAndSignal(SignalContainerCommand command) ContainerId cId = createContainerId(0); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java index 61477a7ad5..b7d0e48004 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java @@ -415,7 +415,7 @@ public void testContainerResizeRecovery() throws Exception { fileWriter.close(); FileContext localFS = FileContext.getLocalFSFileContext(); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = RecordFactoryProvider .getRecordFactory(null).newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java index cf7ca8db0d..a558338ee3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java @@ -538,7 +538,7 @@ private void verifyTailErrorLogOnContainerExit(Configuration conf, when(container.getContainerId()).thenReturn(containerId); when(container.getUser()).thenReturn("test"); String relativeContainerLogDir = ContainerLaunch.getRelativeContainerLogDir( - appId.toString(), ConverterUtils.toString(containerId)); + appId.toString(), containerId.toString()); Path containerLogDir = dirsHandler.getLogPathForWrite(relativeContainerLogDir, false); @@ -744,7 +744,7 @@ public void testContainerEnvVariables() throws Exception { // upload the script file so that the container can run it URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -945,7 +945,7 @@ private void internalKillTest(boolean delayed) throws Exception { // upload the script file so that the container can run it URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1284,7 +1284,7 @@ public void testKillProcessGroup() throws Exception { // upload the script file so that the container can run it URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java index 611fc05137..fac708655f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java @@ -404,7 +404,7 @@ static ResourceLocalizationSpec getMockRsrc(Random r, when(resourceLocalizationSpec.getResource()).thenReturn(rsrc); when(resourceLocalizationSpec.getDestinationDirectory()). 
- thenReturn(ConverterUtils.getYarnUrlFromPath(p)); + thenReturn(URL.fromPath(p)); return resourceLocalizationSpec; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java index 81446f5a87..13310ad510 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java @@ -24,6 +24,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalResourceRequest; import org.apache.hadoop.yarn.util.ConverterUtils; @@ -39,8 +40,10 @@ public class TestLocalResource { static org.apache.hadoop.yarn.api.records.LocalResource getYarnResource(Path p, long size, long timestamp, LocalResourceType type, LocalResourceVisibility state, String pattern) throws URISyntaxException { - org.apache.hadoop.yarn.api.records.LocalResource ret = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(org.apache.hadoop.yarn.api.records.LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromURI(p.toUri())); + org.apache.hadoop.yarn.api.records.LocalResource ret = + RecordFactoryProvider.getRecordFactory(null).newRecordInstance( + org.apache.hadoop.yarn.api.records.LocalResource.class); + ret.setResource(URL.fromURI(p.toUri())); ret.setSize(size); ret.setTimestamp(timestamp); ret.setType(type); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java index c612c14c09..f594d8cf36 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java @@ -945,7 +945,7 @@ public boolean matches(Object o) { // Sigh. 
Thread init of private localizer not accessible Thread.sleep(1000); dispatcher.await(); - String appStr = ConverterUtils.toString(appId); + String appStr = appId.toString(); String ctnrStr = c.getContainerId().toString(); ArgumentCaptor contextCaptor = ArgumentCaptor .forClass(LocalizerStartContext.class); @@ -2144,12 +2144,16 @@ public void testParallelDownloadAttemptsForPublicResource() throws Exception { // removing pending download request. spyService.getPublicLocalizer().pending.clear(); + LocalizerContext lc = mock(LocalizerContext.class); + when(lc.getContainerId()).thenReturn(ContainerId.newContainerId( + ApplicationAttemptId.newInstance(ApplicationId.newInstance(1L, 1), 1), + 1L)); + // Now I need to simulate a race condition wherein Event is added to // dispatcher before resource state changes to either FAILED or LOCALIZED // Hence sending event directly to dispatcher. LocalizerResourceRequestEvent localizerEvent = - new LocalizerResourceRequestEvent(lr, null, - mock(LocalizerContext.class), null); + new LocalizerResourceRequestEvent(lr, null, lc, null); dispatcher1.getEventHandler().handle(localizerEvent); // Waiting for download to start. This should return false as new download @@ -2457,7 +2461,7 @@ public void testFailedDirsResourceRelease() throws Exception { BuilderUtils.newApplicationId(314159265358979L, 3); when(app.getUser()).thenReturn(user); when(app.getAppId()).thenReturn(appId); - when(app.toString()).thenReturn(ConverterUtils.toString(appId)); + when(app.toString()).thenReturn(appId.toString()); // init container. final Container c = getMockContainer(appId, 42, user); @@ -2468,17 +2472,16 @@ public void testFailedDirsResourceRelease() throws Exception { Path usersdir = new Path(tmpDirs.get(i), ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, user); Path allAppsdir = new Path(userdir, ContainerLocalizer.APPCACHE); - Path appDir = new Path(allAppsdir, ConverterUtils.toString(appId)); + Path appDir = new Path(allAppsdir, appId.toString()); Path containerDir = - new Path(appDir, ConverterUtils.toString(c.getContainerId())); + new Path(appDir, c.getContainerId().toString()); containerLocalDirs.add(containerDir); appLocalDirs.add(appDir); Path sysDir = new Path(tmpDirs.get(i), ResourceLocalizationService.NM_PRIVATE_DIR); - Path appSysDir = new Path(sysDir, ConverterUtils.toString(appId)); - Path containerSysDir = - new Path(appSysDir, ConverterUtils.toString(c.getContainerId())); + Path appSysDir = new Path(sysDir, appId.toString()); + Path containerSysDir = new Path(appSysDir, c.getContainerId().toString()); nmLocalContainerDirs.add(containerSysDir); nmLocalAppDirs.add(appSysDir); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestAppLogAggregatorImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestAppLogAggregatorImpl.java index 0127923b0d..f929ca86fd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestAppLogAggregatorImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestAppLogAggregatorImpl.java @@ -108,10 +108,8 @@ public 
void testAggregatorWithRetentionPolicyDisabledShouldUploadAllFiles() final ContainerId containerId = ContainerId.newContainerId(attemptId, 0); // create artificial log files - final File appLogDir = new File(LOCAL_LOG_DIR, - ConverterUtils.toString(applicationId)); - final File containerLogDir = new File(appLogDir, - ConverterUtils.toString(containerId)); + final File appLogDir = new File(LOCAL_LOG_DIR, applicationId.toString()); + final File containerLogDir = new File(appLogDir, containerId.toString()); containerLogDir.mkdirs(); final Set logFiles = createContainerLogFiles(containerLogDir, 3); @@ -135,9 +133,9 @@ public void testAggregatorWhenNoFileOlderThanRetentionPolicyShouldUploadAll() // create artificial log files final File appLogDir = new File(LOCAL_LOG_DIR, - ConverterUtils.toString(applicationId)); + applicationId.toString()); final File containerLogDir = new File(appLogDir, - ConverterUtils.toString(containerId)); + containerId.toString()); containerLogDir.mkdirs(); final Set logFiles = createContainerLogFiles(containerLogDir, 3); @@ -163,9 +161,9 @@ public void testAggregatorWhenAllFilesOlderThanRetentionShouldUploadNone() // create artificial log files final File appLogDir = new File(LOCAL_LOG_DIR, - ConverterUtils.toString(applicationId)); + applicationId.toString()); final File containerLogDir = new File(appLogDir, - ConverterUtils.toString(containerId)); + containerId.toString()); containerLogDir.mkdirs(); final Set logFiles = createContainerLogFiles(containerLogDir, 3); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java index 3961e1ac51..92c6b805b7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java @@ -194,7 +194,7 @@ private void verifyLocalFileDeletion( // AppLogDir should be created File app1LogDir = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); app1LogDir.mkdir(); logAggregationService .handle(new LogHandlerAppStartedEvent( @@ -221,7 +221,7 @@ private void verifyLocalFileDeletion( verify(delSrvc).delete(eq(user), eq((Path) null), eq(new Path(app1LogDir.getAbsolutePath()))); - String containerIdStr = ConverterUtils.toString(container11); + String containerIdStr = container11.toString(); File containerLogDir = new File(app1LogDir, containerIdStr); int count = 0; int maxAttempts = 50; @@ -315,7 +315,7 @@ public void testNoLogsUploadedOnAppFinish() throws Exception { logAggregationService.start(); ApplicationId app = BuilderUtils.newApplicationId(1234, 1); - File appLogDir = new File(localLogDir, ConverterUtils.toString(app)); + File appLogDir = new File(localLogDir, app.toString()); appLogDir.mkdir(); LogAggregationContext context = LogAggregationContext.newInstance("HOST*", "sys*"); @@ -352,7 +352,7 @@ public void testNoContainerOnNode() throws Exception { // AppLogDir should be 
created File app1LogDir = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); app1LogDir.mkdir(); logAggregationService .handle(new LogHandlerAppStartedEvent( @@ -402,7 +402,7 @@ public void testMultipleAppsLogAggregation() throws Exception { // AppLogDir should be created File app1LogDir = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); app1LogDir.mkdir(); logAggregationService .handle(new LogHandlerAppStartedEvent( @@ -423,7 +423,7 @@ public void testMultipleAppsLogAggregation() throws Exception { BuilderUtils.newApplicationAttemptId(application2, 1); File app2LogDir = - new File(localLogDir, ConverterUtils.toString(application2)); + new File(localLogDir, application2.toString()); app2LogDir.mkdir(); LogAggregationContext contextWithAMOnly = Records.newRecord(LogAggregationContext.class); @@ -452,7 +452,7 @@ public void testMultipleAppsLogAggregation() throws Exception { BuilderUtils.newApplicationAttemptId(application3, 1); File app3LogDir = - new File(localLogDir, ConverterUtils.toString(application3)); + new File(localLogDir, application3.toString()); app3LogDir.mkdir(); LogAggregationContext contextWithAMAndFailed = Records.newRecord(LogAggregationContext.class); @@ -583,7 +583,7 @@ public void testVerifyAndCreateRemoteDirsFailure() BuilderUtils.newApplicationId(System.currentTimeMillis(), (int) (Math.random() * 1000)); File appLogDir = - new File(localLogDir, ConverterUtils.toString(appId2)); + new File(localLogDir, appId2.toString()); appLogDir.mkdir(); logAggregationService.handle(new LogHandlerAppStartedEvent(appId2, this.user, null, this.acls, contextWithAMAndFailed)); @@ -758,7 +758,7 @@ public void testLogAggregationCreateDirsFailsWithoutKillingNM() (int) (Math.random() * 1000)); File appLogDir = - new File(localLogDir, ConverterUtils.toString(appId)); + new File(localLogDir, appId.toString()); appLogDir.mkdir(); Exception e = new RuntimeException("KABOOM!"); @@ -805,7 +805,7 @@ public void testLogAggregationCreateDirsFailsWithoutKillingNM() private void writeContainerLogs(File appLogDir, ContainerId containerId, String[] fileName) throws IOException { // ContainerLogDir should be created - String containerStr = ConverterUtils.toString(containerId); + String containerStr = containerId.toString(); File containerLogDir = new File(appLogDir, containerStr); boolean created = containerLogDir.mkdirs(); LOG.info("Created Dir:" + containerLogDir.getAbsolutePath() + " status :" @@ -943,7 +943,7 @@ private LogFileStatusInLastCycle verifyContainerLogs( Assert.assertTrue("number of containers with logs should be at most " + minNumOfContainers,logMap.size() <= maxNumOfContainers); for (ContainerId cId : expectedContainerIds) { - String containerStr = ConverterUtils.toString(cId); + String containerStr = cId.toString(); Map thisContainerMap = logMap.remove(containerStr); Assert.assertEquals(numOfLogsPerContainer, thisContainerMap.size()); for (String fileType : logFiles) { @@ -998,7 +998,7 @@ public void testLogAggregationForRealContainerLaunch() throws IOException, ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1435,7 +1435,7 @@ public void testLogAggregationServiceWithPatterns() throws Exception { // has only logs from 
stdout and syslog // AppLogDir should be created File appLogDir1 = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); appLogDir1.mkdir(); logAggregationService.handle(new LogHandlerAppStartedEvent(application1, this.user, null, this.acls, @@ -1460,7 +1460,7 @@ public void testLogAggregationServiceWithPatterns() throws Exception { BuilderUtils.newApplicationAttemptId(application2, 1); File app2LogDir = - new File(localLogDir, ConverterUtils.toString(application2)); + new File(localLogDir, application2.toString()); app2LogDir.mkdir(); LogAggregationContextWithExcludePatterns.setLogAggregationPolicyClassName( AMOnlyLogAggregationPolicy.class.getName()); @@ -1485,7 +1485,7 @@ public void testLogAggregationServiceWithPatterns() throws Exception { ApplicationAttemptId appAttemptId3 = BuilderUtils.newApplicationAttemptId(application3, 1); File app3LogDir = - new File(localLogDir, ConverterUtils.toString(application3)); + new File(localLogDir, application3.toString()); app3LogDir.mkdir(); context1.setLogAggregationPolicyClassName( AMOnlyLogAggregationPolicy.class.getName()); @@ -1510,7 +1510,7 @@ public void testLogAggregationServiceWithPatterns() throws Exception { ApplicationAttemptId appAttemptId4 = BuilderUtils.newApplicationAttemptId(application4, 1); File app4LogDir = - new File(localLogDir, ConverterUtils.toString(application4)); + new File(localLogDir, application4.toString()); app4LogDir.mkdir(); context2.setLogAggregationPolicyClassName( AMOnlyLogAggregationPolicy.class.getName()); @@ -2012,7 +2012,7 @@ private ContainerId finishContainer(ApplicationId application1, containerType); // Simulate log-file creation File appLogDir1 = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); appLogDir1.mkdir(); writeContainerLogs(appLogDir1, containerId, logFiles); @@ -2123,7 +2123,7 @@ private void testLogAggregationService(boolean retentionSizeLimitation) // AppLogDir should be created File appLogDir = - new File(localLogDir, ConverterUtils.toString(application)); + new File(localLogDir, application.toString()); appLogDir.mkdir(); logAggregationService.handle(new LogHandlerAppStartedEvent(application, this.user, null, this.acls, logAggregationContextWithInterval)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java index 94145e40f6..1b4e3b7d77 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java @@ -210,7 +210,7 @@ public void testContainerKillOnMemoryOverflow() throws IOException, ContainerId cId = ContainerId.newContainerId(appAttemptId, 0); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git 
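In the test hunks above, the recurring pattern is wrapping a qualified local Path into a yarn URL with URL.fromPath() before building a LocalResource, where ConverterUtils.getYarnUrlFromPath() was used previously. A compact sketch of that pattern under the same API; the helper name and use of an arbitrary script file are assumptions for illustration, not code from the patch.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.URL;

public class LocalResourceSketch {
  /** Builds a LocalResource for a local script file, mirroring the test setup above. */
  static LocalResource scriptResource(File scriptFile) throws IOException {
    FileContext localFS = FileContext.getLocalFSFileContext();
    // Was ConverterUtils.getYarnUrlFromPath(localFS.makeQualified(...)).
    URL scriptUrl = URL.fromPath(
        localFS.makeQualified(new Path(scriptFile.getAbsolutePath())));
    return LocalResource.newInstance(scriptUrl,
        LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
        scriptFile.length(), scriptFile.lastModified());
  }
}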
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java index 2f409c8246..d254e4be19 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java @@ -374,7 +374,7 @@ public void testStartResourceLocalization() throws IOException { Path appRsrcPath = new Path("hdfs://some/app/resource"); LocalResourcePBImpl rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(appRsrcPath), + URL.fromPath(appRsrcPath), LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION, 123L, 456L); LocalResourceProto appRsrcProto = rsrcPb.getProto(); @@ -407,7 +407,7 @@ public void testStartResourceLocalization() throws IOException { // start some public and private resources Path pubRsrcPath1 = new Path("hdfs://some/public/resource1"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath1), + URL.fromPath(pubRsrcPath1), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto1 = rsrcPb.getProto(); @@ -416,7 +416,7 @@ public void testStartResourceLocalization() throws IOException { pubRsrcLocalPath1); Path pubRsrcPath2 = new Path("hdfs://some/public/resource2"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath2), + URL.fromPath(pubRsrcPath2), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto2 = rsrcPb.getProto(); @@ -425,7 +425,7 @@ public void testStartResourceLocalization() throws IOException { pubRsrcLocalPath2); Path privRsrcPath = new Path("hdfs://some/private/resource"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(privRsrcPath), + URL.fromPath(privRsrcPath), LocalResourceType.PATTERN, LocalResourceVisibility.PRIVATE, 789L, 680L, "*pattern*"); LocalResourceProto privRsrcProto = rsrcPb.getProto(); @@ -470,7 +470,7 @@ public void testFinishResourceLocalization() throws IOException { Path appRsrcPath = new Path("hdfs://some/app/resource"); LocalResourcePBImpl rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(appRsrcPath), + URL.fromPath(appRsrcPath), LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION, 123L, 456L); LocalResourceProto appRsrcProto = rsrcPb.getProto(); @@ -510,7 +510,7 @@ public void testFinishResourceLocalization() throws IOException { // start some public and private resources Path pubRsrcPath1 = new Path("hdfs://some/public/resource1"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath1), + URL.fromPath(pubRsrcPath1), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto1 = rsrcPb.getProto(); @@ -519,7 +519,7 @@ public void testFinishResourceLocalization() throws IOException { pubRsrcLocalPath1); Path pubRsrcPath2 = new 
Path("hdfs://some/public/resource2"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath2), + URL.fromPath(pubRsrcPath2), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto2 = rsrcPb.getProto(); @@ -528,7 +528,7 @@ public void testFinishResourceLocalization() throws IOException { pubRsrcLocalPath2); Path privRsrcPath = new Path("hdfs://some/private/resource"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(privRsrcPath), + URL.fromPath(privRsrcPath), LocalResourceType.PATTERN, LocalResourceVisibility.PRIVATE, 789L, 680L, "*pattern*"); LocalResourceProto privRsrcProto = rsrcPb.getProto(); @@ -589,7 +589,7 @@ public void testRemoveLocalizedResource() throws IOException { Path appRsrcPath = new Path("hdfs://some/app/resource"); LocalResourcePBImpl rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(appRsrcPath), + URL.fromPath(appRsrcPath), LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION, 123L, 456L); LocalResourceProto appRsrcProto = rsrcPb.getProto(); @@ -619,7 +619,7 @@ public void testRemoveLocalizedResource() throws IOException { // add public and private resources and remove some Path pubRsrcPath1 = new Path("hdfs://some/public/resource1"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath1), + URL.fromPath(pubRsrcPath1), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto1 = rsrcPb.getProto(); @@ -635,7 +635,7 @@ public void testRemoveLocalizedResource() throws IOException { stateStore.finishResourceLocalization(null, null, pubLocalizedProto1); Path pubRsrcPath2 = new Path("hdfs://some/public/resource2"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath2), + URL.fromPath(pubRsrcPath2), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto2 = rsrcPb.getProto(); @@ -652,7 +652,7 @@ public void testRemoveLocalizedResource() throws IOException { stateStore.removeLocalizedResource(null, null, pubRsrcLocalPath2); Path privRsrcPath = new Path("hdfs://some/private/resource"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(privRsrcPath), + URL.fromPath(privRsrcPath), LocalResourceType.PATTERN, LocalResourceVisibility.PRIVATE, 789L, 680L, "*pattern*"); LocalResourceProto privRsrcProto = rsrcPb.getProto(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java index b90c1be0b5..3f71179fca 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java @@ -249,7 +249,7 @@ private void writeContainerLogs(Context nmContext, containerLogDir.mkdirs(); for (String fileType : new String[] { "stdout", "stderr", "syslog" }) { Writer writer = new FileWriter(new File(containerLogDir, 
fileType)); - writer.write(ConverterUtils.toString(containerId) + "\n Hello " + writer.write(containerId.toString() + "\n Hello " + fileType + "!"); writer.close(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java index 18239f1663..7ec8f27405 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java @@ -40,6 +40,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.util.NodeHealthScriptRunner; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; @@ -280,7 +281,7 @@ public void testNodeHelper(String path, String media) throws JSONException, verifyNodeContainerInfo( conInfo.getJSONObject(i), nmContext.getContainers().get( - ConverterUtils.toContainerId(conInfo.getJSONObject(i).getString( + ContainerId.fromString(conInfo.getJSONObject(i).getString( "id")))); } } @@ -316,7 +317,7 @@ public void testNodeSingleContainersHelper(String media) assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); JSONObject json = response.getEntity(JSONObject.class); verifyNodeContainerInfo(json.getJSONObject("container"), nmContext - .getContainers().get(ConverterUtils.toContainerId(id))); + .getContainers().get(ContainerId.fromString(id))); } } @@ -449,7 +450,7 @@ public void testNodeSingleContainerXML() throws JSONException, Exception { NodeList nodes = dom.getElementsByTagName("container"); assertEquals("incorrect number of elements", 1, nodes.getLength()); verifyContainersInfoXML(nodes, - nmContext.getContainers().get(ConverterUtils.toContainerId(id))); + nmContext.getContainers().get(ContainerId.fromString(id))); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java index 3634107c67..e36d96b0bb 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java @@ -1345,7 +1345,7 @@ private static void removeApplication(Configuration conf, String applicationId) rmStore.init(conf); rmStore.start(); try { - ApplicationId removeAppId = ConverterUtils.toApplicationId(applicationId); + ApplicationId removeAppId = ApplicationId.fromString(applicationId); LOG.info("Deleting application " + removeAppId + " from state store"); rmStore.removeApplication(removeAppId); 
LOG.info("Application is deleted from state store"); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java index a6f096930e..02f90ddb9a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java @@ -499,7 +499,7 @@ private int loadRMApp(RMState rmState, LeveldbIterator iter, String appIdStr, private ApplicationStateData createApplicationState(String appIdStr, byte[] data) throws IOException { - ApplicationId appId = ConverterUtils.toApplicationId(appIdStr); + ApplicationId appId = ApplicationId.fromString(appIdStr); ApplicationStateDataPBImpl appState = new ApplicationStateDataPBImpl( ApplicationStateDataProto.parseFrom(data)); @@ -545,8 +545,7 @@ ApplicationAttemptStateData loadRMAppAttemptState( private ApplicationAttemptStateData createAttemptState(String itemName, byte[] data) throws IOException { - ApplicationAttemptId attemptId = - ConverterUtils.toApplicationAttemptId(itemName); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString(itemName); ApplicationAttemptStateDataPBImpl attemptState = new ApplicationAttemptStateDataPBImpl( ApplicationAttemptStateDataProto.parseFrom(data)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java index 9afbf6d204..9e05f6d9ff 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java @@ -514,7 +514,7 @@ private synchronized void loadRMAppState(RMState rmState) throws Exception { if (LOG.isDebugEnabled()) { LOG.debug("Loading application from znode: " + childNodeName); } - ApplicationId appId = ConverterUtils.toApplicationId(childNodeName); + ApplicationId appId = ApplicationId.fromString(childNodeName); ApplicationStateDataPBImpl appState = new ApplicationStateDataPBImpl( ApplicationStateDataProto.parseFrom(childData)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java index 045c7bdc95..65491026a9 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java @@ -133,7 +133,7 @@ public Map getNodeResourceMap() { = new HashMap (); for (String node : nodes) { - NodeId nid = ConverterUtils.toNodeId(node); + NodeId nid = NodeId.fromString(node); int vcores = getVcoresPerNode(node); int memory = getMemoryPerNode(node); int overCommitTimeout = getOverCommitTimeoutPerNode(node); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java index 512149385d..95f81d43dd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java @@ -346,7 +346,7 @@ public String getLogURL() { logURL.append(WebAppUtils.getHttpSchemePrefix(rmContext .getYarnConfiguration())); logURL.append(WebAppUtils.getRunningLogURL( - container.getNodeHttpAddress(), ConverterUtils.toString(containerId), + container.getNodeHttpAddress(), containerId.toString(), user)); return logURL.toString(); } finally { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java index b4d792143a..305f1d5acc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java @@ -83,8 +83,8 @@ protected void renderData(Block html) { } AppInfo app = new AppInfo(appReport); - ApplicationAttemptId appAttemptId = - ConverterUtils.toApplicationAttemptId(app.getCurrentAppAttemptId()); + ApplicationAttemptId appAttemptId = ApplicationAttemptId.fromString( + app.getCurrentAppAttemptId()); String queuePercent = "N/A"; String clusterPercent = "N/A"; if(appReport.getApplicationResourceUsageReport() != null) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java index de2a23f786..0f1a590dfb 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java @@ -220,7 +220,7 @@ private String ahsRedirectPath(String uri, RMWebApp rmWebApp) { break; case "appattempt": try{ - appAttemptId = ConverterUtils.toApplicationAttemptId(parts[3]); + appAttemptId = ApplicationAttemptId.fromString(parts[3]); } catch (IllegalArgumentException e) { LOG.debug("Error parsing {} as an ApplicationAttemptId", parts[3], e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java index d05d95202d..878bf65ad1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java @@ -113,6 +113,7 @@ import org.apache.hadoop.yarn.api.records.ReservationRequestInterpreter; import org.apache.hadoop.yarn.api.records.ReservationRequests; import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; @@ -373,7 +374,7 @@ public NodeInfo getNode(@PathParam("nodeId") String nodeId) { if (sched == null) { throw new NotFoundException("Null ResourceScheduler instance"); } - NodeId nid = ConverterUtils.toNodeId(nodeId); + NodeId nid = NodeId.fromString(nodeId); RMNode ni = this.rm.getRMContext().getRMNodes().get(nid); boolean isInactive = false; if (ni == null) { @@ -1467,9 +1468,7 @@ protected ApplicationSubmissionContext createAppSubmissionContext( String error = "Could not parse application id " + newApp.getApplicationId(); try { - appid = - ConverterUtils.toApplicationId(recordFactory, - newApp.getApplicationId()); + appid = ApplicationId.fromString(newApp.getApplicationId()); } catch (Exception e) { throw new BadRequestException(error); } @@ -1553,7 +1552,7 @@ protected ContainerLaunchContext createContainerLaunchContext( LocalResourceInfo l = entry.getValue(); LocalResource lr = LocalResource.newInstance( - ConverterUtils.getYarnUrlFromURI(l.getUrl()), l.getType(), + URL.fromURI(l.getUrl()), l.getType(), l.getVisibility(), l.getSize(), l.getTimestamp()); hlr.put(entry.getKey(), lr); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java index e8c8bca555..55bf999b04 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java @@ -67,7 +67,7 @@ public AppAttemptInfo(ResourceManager rm, RMAppAttempt attempt, String user, this.nodeId = masterContainer.getNodeId().toString(); this.logsLink = WebAppUtils.getRunningLogURL(schemePrefix + masterContainer.getNodeHttpAddress(), - ConverterUtils.toString(masterContainer.getId()), user); + masterContainer.getId().toString(), user); nodesBlacklistedBySystem = StringUtils.join(attempt.getAMBlacklistManager() diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java index 63b601deab..c5c02a806d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java @@ -165,8 +165,7 @@ public AppInfo(ResourceManager rm, RMApp app, Boolean hasAccess, this.amContainerLogsExist = true; this.amContainerLogs = WebAppUtils.getRunningLogURL( schemePrefix + masterContainer.getNodeHttpAddress(), - ConverterUtils.toString(masterContainer.getId()), - app.getUser()); + masterContainer.getId().toString(), app.getUser()); this.amHostHttpAddress = masterContainer.getNodeHttpAddress(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java index b109639a55..af342df605 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java @@ -218,7 +218,7 @@ public void testRefreshNodesResourceWithFileSystemBasedConfigurationProvider() fail("Should not get any exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); @@ -257,7 +257,7 @@ public void testRefreshNodesResourceWithResourceReturnInRegistration() fail("Should not get any exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); @@ -307,7 +307,7 @@ public void testRefreshNodesResourceWithResourceReturnInHeartbeat() fail("Should not get any 
exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); @@ -355,7 +355,7 @@ public void testResourcePersistentForNMRegistrationWithNewResource() fail("Should not get any exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java index d46ed051c9..758bbae3c2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java @@ -183,7 +183,7 @@ protected RMAppAttempt storeAttempt(RMStateStore store, RMAppAttemptMetrics mockRmAppAttemptMetrics = mock(RMAppAttemptMetrics.class); Container container = new ContainerPBImpl(); - container.setId(ConverterUtils.toContainerId(containerIdStr)); + container.setId(ContainerId.fromString(containerIdStr)); RMAppAttempt mockAttempt = mock(RMAppAttempt.class); when(mockAttempt.getAppAttemptId()).thenReturn(attemptId); when(mockAttempt.getMasterContainer()).thenReturn(container); @@ -227,8 +227,8 @@ void testRMAppStateStore(RMStateStoreHelper stateStoreHelper, ClientToAMTokenSecretManagerInRM clientToAMTokenMgr = new ClientToAMTokenSecretManagerInRM(); - ApplicationAttemptId attemptId1 = ConverterUtils - .toApplicationAttemptId("appattempt_1352994193343_0001_000001"); + ApplicationAttemptId attemptId1 = ApplicationAttemptId.fromString( + "appattempt_1352994193343_0001_000001"); ApplicationId appId1 = attemptId1.getApplicationId(); storeApp(store, appId1, submitTime, startTime); verifier.afterStoreApp(store, appId1); @@ -245,8 +245,8 @@ void testRMAppStateStore(RMStateStoreHelper stateStoreHelper, .getMasterContainer().getId(); String appAttemptIdStr2 = "appattempt_1352994193343_0001_000002"; - ApplicationAttemptId attemptId2 = - ConverterUtils.toApplicationAttemptId(appAttemptIdStr2); + ApplicationAttemptId attemptId2 = ApplicationAttemptId.fromString( + appAttemptIdStr2); // create application token and client token key for attempt2 Token appAttemptToken2 = @@ -259,8 +259,8 @@ void testRMAppStateStore(RMStateStoreHelper stateStoreHelper, appAttemptToken2, clientTokenKey2, dispatcher) .getMasterContainer().getId(); - ApplicationAttemptId attemptIdRemoved = ConverterUtils - .toApplicationAttemptId("appattempt_1352994193343_0002_000001"); + ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.fromString( + "appattempt_1352994193343_0002_000001"); ApplicationId appIdRemoved = attemptIdRemoved.getApplicationId(); storeApp(store, appIdRemoved, submitTime, startTime); storeAttempt(store, attemptIdRemoved, diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java index a51ccb5346..61088e1f64 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java @@ -183,7 +183,7 @@ public void testFSRMStateStore() throws Exception { (FileSystemRMStateStore) fsTester.getRMStateStore(); String appAttemptIdStr3 = "appattempt_1352994193343_0001_000003"; ApplicationAttemptId attemptId3 = - ConverterUtils.toApplicationAttemptId(appAttemptIdStr3); + ApplicationAttemptId.fromString(appAttemptIdStr3); Path appDir = fsTester.store.getAppDir(attemptId3.getApplicationId().toString()); Path tempAppAttemptFile = @@ -364,7 +364,7 @@ protected void modifyAppState() throws Exception { // imitate appAttemptFile1 is still .new, but old one is deleted String appAttemptIdStr1 = "appattempt_1352994193343_0001_000001"; ApplicationAttemptId attemptId1 = - ConverterUtils.toApplicationAttemptId(appAttemptIdStr1); + ApplicationAttemptId.fromString(appAttemptIdStr1); Path appDir = fsTester.store.getAppDir(attemptId1.getApplicationId().toString()); Path appAttemptFile1 = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java index 7df31cf43e..19d30641e3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java @@ -34,6 +34,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.Container; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl; @@ -399,14 +400,14 @@ public void testFencedState() throws Exception { // Add a new attempt ClientToAMTokenSecretManagerInRM clientToAMTokenMgr = new ClientToAMTokenSecretManagerInRM(); - ApplicationAttemptId attemptId = ConverterUtils - .toApplicationAttemptId("appattempt_1234567894321_0001_000001"); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString( + "appattempt_1234567894321_0001_000001"); SecretKey clientTokenMasterKey = clientToAMTokenMgr.createMasterKey(attemptId); RMAppAttemptMetrics mockRmAppAttemptMetrics = mock(RMAppAttemptMetrics.class); Container container = new ContainerPBImpl(); - 
container.setId(ConverterUtils.toContainerId("container_1234567891234_0001_01_000001")); + container.setId(ContainerId.fromString("container_1234567891234_0001_01_000001")); RMAppAttempt mockAttempt = mock(RMAppAttempt.class); when(mockAttempt.getAppAttemptId()).thenReturn(attemptId); when(mockAttempt.getMasterContainer()).thenReturn(container); @@ -491,8 +492,8 @@ public void testDuplicateRMAppDeletion() throws Exception { TestDispatcher dispatcher = new TestDispatcher(); store.setRMDispatcher(dispatcher); - ApplicationAttemptId attemptIdRemoved = ConverterUtils - .toApplicationAttemptId("appattempt_1352994193343_0002_000001"); + ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.fromString( + "appattempt_1352994193343_0002_000001"); ApplicationId appIdRemoved = attemptIdRemoved.getApplicationId(); storeApp(store, appIdRemoved, submitTime, startTime); storeAttempt(store, attemptIdRemoved, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java index c7ef8fadbd..682ed75391 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java @@ -57,6 +57,7 @@ import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.LocalResource; @@ -65,6 +66,7 @@ import org.apache.hadoop.yarn.api.records.LogAggregationContext; import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.ReservationId; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.MockNM; @@ -859,7 +861,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia) RMApp app = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appId)); + .get(ApplicationId.fromString(appId)); assertEquals(appName, app.getName()); assertEquals(webserviceUserName, app.getUser()); assertEquals(2, app.getMaxAppAttempts()); @@ -877,8 +879,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia) Map appLRs = ctx.getLocalResources(); assertTrue(appLRs.containsKey(lrKey)); LocalResource exampleLR = appLRs.get(lrKey); - assertEquals(ConverterUtils.getYarnUrlFromURI(y.getUrl()), - exampleLR.getResource()); + assertEquals(URL.fromURI(y.getUrl()), exampleLR.getResource()); assertEquals(y.getSize(), exampleLR.getSize()); assertEquals(y.getTimestamp(), exampleLR.getTimestamp()); assertEquals(y.getType(), exampleLR.getType()); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java index 36e24ecf18..4e26bd1ce6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java @@ -50,6 +50,7 @@ import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; @@ -236,11 +237,11 @@ public void testDelegationTokenAuth() throws Exception { boolean appExists = rm.getRMContext().getRMApps() - .containsKey(ConverterUtils.toApplicationId(appid)); + .containsKey(ApplicationId.fromString(appid)); assertTrue(appExists); RMApp actualApp = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appid)); + .get(ApplicationId.fromString(appid)); String owner = actualApp.getUser(); assertEquals("client", owner); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java index 2f6a02287c..249e825049 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java @@ -37,6 +37,7 @@ import org.apache.hadoop.minikdc.MiniKdc; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.KerberosTestUtils; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; @@ -236,11 +237,11 @@ private void testAnonymousSimpleUser() throws Exception { assertEquals(Status.ACCEPTED.getStatusCode(), conn.getResponseCode()); boolean appExists = rm.getRMContext().getRMApps() - .containsKey(ConverterUtils.toApplicationId(appid)); + .containsKey(ApplicationId.fromString(appid)); assertTrue(appExists); RMApp actualApp = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appid)); + .get(ApplicationId.fromString(appid)); String owner = actualApp.getUser(); 
assertEquals( rm.getConfig().get(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, @@ -259,11 +260,11 @@ private void testAnonymousSimpleUser() throws Exception { conn.getInputStream(); appExists = rm.getRMContext().getRMApps() - .containsKey(ConverterUtils.toApplicationId(appid)); + .containsKey(ApplicationId.fromString(appid)); assertTrue(appExists); actualApp = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appid)); + .get(ApplicationId.fromString(appid)); owner = actualApp.getUser(); assertEquals("client", owner); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java index 231ca7241a..958b54e623 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java @@ -481,7 +481,7 @@ private static ApplicationId parseApplicationId(String appIdStr) { ApplicationId appId = null; if (appIdStr.startsWith(ApplicationId.appIdStrPrefix)) { try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (IllegalArgumentException e) { appId = null; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java index db241a891d..884b5cd18a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java @@ -34,15 +34,16 @@ class EntityGroupPlugInForTest extends TimelineEntityGroupPlugin { public Set getTimelineEntityGroupId(String entityType, NameValuePair primaryFilter, Collection secondaryFilters) { - ApplicationId appId - = ConverterUtils.toApplicationId(primaryFilter.getValue().toString()); + ApplicationId appId = ApplicationId.fromString( + primaryFilter.getValue().toString()); return Sets.newHashSet(getStandardTimelineGroupId(appId)); } @Override public Set getTimelineEntityGroupId(String entityId, String entityType) { - ApplicationId appId = ConverterUtils.toApplicationId(entityId); + ApplicationId appId = ApplicationId.fromString( + entityId); return Sets.newHashSet(getStandardTimelineGroupId(appId)); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java index d6baab6737..1c12f36192 100644 
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java @@ -68,7 +68,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils { private static final String SAMPLE_APP_PREFIX_CACHE_TEST = "1234_000"; private static final int CACHE_TEST_CACHE_SIZE = 5; - + private static final String TEST_SUMMARY_LOG_FILE_NAME = EntityGroupFSTimelineStore.SUMMARY_LOG_PREFIX + "test"; private static final String TEST_DOMAIN_LOG_FILE_NAME @@ -117,7 +117,7 @@ public static void setupClass() throws Exception { sampleAppIds = new ArrayList<>(CACHE_TEST_CACHE_SIZE + 1); for (int i = 0; i < CACHE_TEST_CACHE_SIZE + 1; i++) { - ApplicationId appId = ConverterUtils.toApplicationId( + ApplicationId appId = ApplicationId.fromString( ConverterUtils.APPLICATION_PREFIX + "_" + SAMPLE_APP_PREFIX_CACHE_TEST + i); sampleAppIds.add(appId);
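All of the hunks above apply the same substitution: the deprecated ConverterUtils helpers are replaced by the static factories on the record classes themselves (ApplicationId.fromString, ApplicationAttemptId.fromString, ContainerId.fromString, NodeId.fromString, URL.fromURI, URL.fromPath), and ConverterUtils.toString(id) becomes a plain id.toString(). As a reference for the pattern, here is a minimal, self-contained sketch using the replacement calls that appear in this patch; the class name and the ID/URI string values are illustrative only and are not taken from the patch.

import java.net.URI;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.URL;

public class ConverterUtilsMigrationSketch {
  public static void main(String[] args) {
    // Old: ConverterUtils.toApplicationId(appIdStr)
    ApplicationId appId =
        ApplicationId.fromString("application_1352994193343_0001");

    // Old: ConverterUtils.toApplicationAttemptId(attemptIdStr)
    ApplicationAttemptId attemptId =
        ApplicationAttemptId.fromString("appattempt_1352994193343_0001_000001");

    // Old: ConverterUtils.toContainerId(containerIdStr)
    ContainerId containerId =
        ContainerId.fromString("container_1352994193343_0001_01_000001");

    // Old: ConverterUtils.toNodeId(host + ":" + port)
    NodeId nodeId = NodeId.fromString("h1:1234");

    // Old: ConverterUtils.getYarnUrlFromURI(uri) / ConverterUtils.getYarnUrlFromPath(path)
    URL urlFromUri = URL.fromURI(URI.create("hdfs://nn:8020/tmp/resource.jar"));
    URL urlFromPath = URL.fromPath(new Path("/tmp/resource.jar"));

    // Old: ConverterUtils.toString(containerId)
    String containerIdStr = containerId.toString();

    System.out.println(appId + " " + attemptId + " " + nodeId + " "
        + urlFromUri + " " + urlFromPath + " " + containerIdStr);
  }
}

The factories parse the same canonical string forms that toString() emits, so round-tripping an ID through toString() and fromString() yields an equal object, which is what the state-store and web-services hunks above rely on.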