From d838c6443d43854598555f4bcf6914e401813d11 Mon Sep 17 00:00:00 2001 From: Wangda Tan Date: Tue, 14 Jun 2016 15:21:41 -0700 Subject: [PATCH] YARN-1942. Deprecate toString/fromString methods from ConverterUtils and move them to records classes like ContainerId/ApplicationId, etc. (wangda) --- .../org/apache/hadoop/mapred/YarnChild.java | 10 +- .../hadoop/mapreduce/v2/app/MRAppMaster.java | 2 +- .../v2/app/job/impl/TaskAttemptImpl.java | 7 +- .../v2/app/webapp/dao/TaskAttemptInfo.java | 4 +- .../mapreduce/v2/app/TestMRAppMaster.java | 66 +++--- .../app/commit/TestCommitterEventHandler.java | 12 +- .../v2/app/job/impl/TestJobImpl.java | 4 +- .../app/webapp/TestAMWebServicesAttempts.java | 2 +- .../mapred/LocalDistributedCacheManager.java | 2 +- .../hadoop/mapreduce/v2/util/MRApps.java | 8 +- .../mapreduce/jobhistory/AMStartedEvent.java | 6 +- .../jobhistory/TaskAttemptStartedEvent.java | 4 +- .../hs/webapp/TestHsWebServicesAttempts.java | 2 +- .../org/apache/hadoop/mapred/YARNRunner.java | 5 +- .../hadoop/mapreduce/v2/TestMRJobs.java | 2 +- .../apache/hadoop/mapred/ShuffleHandler.java | 2 +- .../hadoop/tools/HadoopArchiveLogs.java | 3 +- .../api/records/ApplicationAttemptId.java | 36 ++- .../yarn/api/records/ApplicationId.java | 38 +++- .../hadoop/yarn/api/records/ContainerId.java | 6 +- .../hadoop/yarn/api/records/NodeId.java | 23 +- .../apache/hadoop/yarn/api/records/URL.java | 49 +++++ .../distributedshell/ApplicationMaster.java | 7 +- .../applications/distributedshell/Client.java | 3 +- .../DistributedShellTimelinePlugin.java | 4 +- .../TestDistributedShell.java | 4 +- .../yarn/client/cli/ApplicationCLI.java | 27 ++- .../hadoop/yarn/client/cli/LogsCLI.java | 7 +- .../hadoop/yarn/client/cli/NodeCLI.java | 2 +- .../hadoop/yarn/client/cli/RMAdminCLI.java | 2 +- .../yarn/client/cli/TestRMAdminCLI.java | 2 +- .../AggregatedLogDeletionService.java | 2 +- .../logaggregation/AggregatedLogFormat.java | 11 +- .../yarn/logaggregation/LogCLIHelpers.java | 2 +- .../hadoop/yarn/util/ConverterUtils.java | 206 ++++++------------ .../apache/hadoop/yarn/util/FSDownload.java | 2 +- .../yarn/webapp/log/AggregatedLogsBlock.java | 4 +- .../hadoop/yarn/webapp/util/WebAppUtils.java | 2 +- .../hadoop/yarn/util/TestConverterUtils.java | 30 +-- .../hadoop/yarn/util/TestFSDownload.java | 77 ++++--- ...licationHistoryManagerOnTimelineStore.java | 25 +-- .../FileSystemApplicationHistoryStore.java | 8 +- .../yarn/server/utils/BuilderUtils.java | 2 +- .../yarn/server/webapp/AppAttemptBlock.java | 2 +- .../yarn/server/webapp/ContainerBlock.java | 2 +- .../yarn/server/webapp/WebServices.java | 6 +- .../nodemanager/DefaultContainerExecutor.java | 5 +- .../nodemanager/DockerContainerExecutor.java | 6 +- .../nodemanager/LinuxContainerExecutor.java | 2 +- .../container/ContainerImpl.java | 2 +- .../launcher/ContainerLaunch.java | 8 +- .../launcher/RecoveredContainerLaunch.java | 6 +- .../localizer/ContainerLocalizer.java | 3 +- .../localizer/LocalResourceRequest.java | 4 +- .../ResourceLocalizationService.java | 28 +-- .../event/LocalizerResourceRequestEvent.java | 2 +- .../sharedcache/SharedCacheUploader.java | 2 +- .../logaggregation/AppLogAggregatorImpl.java | 2 +- .../recovery/NMLeveldbStateStoreService.java | 10 +- .../util/NodeManagerBuilderUtils.java | 2 +- .../nodemanager/util/ProcessIdFileReader.java | 3 +- .../nodemanager/webapp/ApplicationPage.java | 4 +- .../nodemanager/webapp/ContainerLogsPage.java | 2 +- .../webapp/ContainerLogsUtils.java | 6 +- .../nodemanager/webapp/ContainerPage.java | 2 +- 
.../nodemanager/webapp/NMWebServices.java | 4 +- .../nodemanager/webapp/dao/AppInfo.java | 4 +- .../nodemanager/TestNodeManagerReboot.java | 2 +- .../nodemanager/TestNodeManagerResync.java | 2 +- .../nodemanager/TestNodeManagerShutdown.java | 2 +- .../impl/pb/TestPBRecordImpl.java | 10 +- .../TestContainerManager.java | 22 +- .../TestContainerManagerRecovery.java | 2 +- .../launcher/TestContainerLaunch.java | 6 +- .../localizer/TestContainerLocalizer.java | 2 +- .../localizer/TestLocalResource.java | 7 +- .../TestResourceLocalizationService.java | 21 +- .../TestLogAggregationService.java | 36 +-- .../monitor/TestContainersMonitor.java | 2 +- .../TestNMLeveldbStateStoreService.java | 24 +- .../nodemanager/webapp/TestNMWebServer.java | 2 +- .../webapp/TestNMWebServicesContainers.java | 7 +- .../resourcemanager/ResourceManager.java | 2 +- .../recovery/LeveldbRMStateStore.java | 5 +- .../recovery/ZKRMStateStore.java | 2 +- .../DynamicResourceConfiguration.java | 2 +- .../rmcontainer/RMContainerImpl.java | 2 +- .../resourcemanager/webapp/RMAppsBlock.java | 4 +- .../webapp/RMWebAppFilter.java | 2 +- .../resourcemanager/webapp/RMWebServices.java | 9 +- .../webapp/dao/AppAttemptInfo.java | 2 +- .../resourcemanager/webapp/dao/AppInfo.java | 3 +- .../resourcemanager/TestRMAdminService.java | 4 +- .../recovery/RMStateStoreTestBase.java | 14 +- .../recovery/TestFSRMStateStore.java | 4 +- .../recovery/TestZKRMStateStore.java | 11 +- .../TestRMWebServicesAppsModification.java | 94 ++++---- ...ServicesDelegationTokenAuthentication.java | 5 +- .../webapp/TestRMWebappAuthentication.java | 9 +- .../timeline/EntityGroupFSTimelineStore.java | 2 +- .../timeline/EntityGroupPlugInForTest.java | 7 +- .../TestEntityGroupFSTimelineStore.java | 4 +- 102 files changed, 626 insertions(+), 547 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java index ec7ade7daa5..164f19dc2f1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java @@ -58,6 +58,7 @@ import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.util.ConverterUtils; /** @@ -290,11 +291,10 @@ class YarnChild { private static void configureTask(JobConf job, Task task, Credentials credentials, Token jt) throws IOException { job.setCredentials(credentials); - - ApplicationAttemptId appAttemptId = - ConverterUtils.toContainerId( - System.getenv(Environment.CONTAINER_ID.name())) - .getApplicationAttemptId(); + + ApplicationAttemptId appAttemptId = ContainerId.fromString( + System.getenv(Environment.CONTAINER_ID.name())) + .getApplicationAttemptId(); LOG.debug("APPLICATION_ATTEMPT_ID: " + appAttemptId); // Set it in conf, so as to be able to be used the the OutputCommitter. 
job.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java index 40078fff033..0b7decc9a5a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java @@ -1544,7 +1544,7 @@ public class MRAppMaster extends CompositeService { validateInputParam(appSubmitTimeStr, ApplicationConstants.APP_SUBMIT_TIME_ENV); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); ApplicationAttemptId applicationAttemptId = containerId.getApplicationAttemptId(); if (applicationAttemptId != null) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java index f87b39742e0..4e2b13d975a 100755 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java @@ -721,8 +721,7 @@ public abstract class TaskAttemptImpl implements LocalResourceType type, LocalResourceVisibility visibility) throws IOException { FileStatus fstat = fc.getFileStatus(file); - URL resourceURL = ConverterUtils.getYarnUrlFromPath(fc.resolvePath(fstat - .getPath())); + URL resourceURL = URL.fromPath(fc.resolvePath(fstat.getPath())); long resourceSize = fstat.getLen(); long resourceModificationTime = fstat.getModificationTime(); @@ -1263,8 +1262,8 @@ public abstract class TaskAttemptImpl implements public TaskAttemptStateInternal recover(TaskAttemptInfo taInfo, OutputCommitter committer, boolean recoverOutput) { ContainerId containerId = taInfo.getContainerId(); - NodeId containerNodeId = ConverterUtils.toNodeId(taInfo.getHostname() + ":" - + taInfo.getPort()); + NodeId containerNodeId = NodeId.fromString( + taInfo.getHostname() + ":" + taInfo.getPort()); String nodeHttpAddress = StringInterner.weakIntern(taInfo.getHostname() + ":" + taInfo.getHttpPort()); // Resource/Priority/Tokens are only needed while launching the container on diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java index d8e89b1cbc9..892c6269619 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java @@ 
-69,8 +69,10 @@ public class TaskAttemptInfo { this.nodeHttpAddress = ta.getNodeHttpAddress(); this.startTime = report.getStartTime(); this.finishTime = report.getFinishTime(); - this.assignedContainerId = ConverterUtils.toString(report.getContainerId()); this.assignedContainer = report.getContainerId(); + if (assignedContainer != null) { + this.assignedContainerId = assignedContainer.toString(); + } this.progress = report.getProgress() * 100; this.status = report.getStateString(); this.state = report.getTaskAttemptState(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java index 78a6178bc6b..1ea290a72c4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java @@ -114,7 +114,7 @@ public class TestMRAppMaster { localFS.delete(testDir, true); new File(testDir.toString()).mkdir(); } - + @Before public void prepare() throws IOException { File dir = new File(stagingDir); @@ -134,11 +134,11 @@ public class TestMRAppMaster { InterruptedException { String applicationAttemptIdStr = "appattempt_1317529182569_0004_000001"; String containerIdStr = "container_1317529182569_0004_000001_1"; - + String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMasterTest appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis()); @@ -159,15 +159,15 @@ public class TestMRAppMaster { String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); FileSystem fs = FileSystem.get(conf); //Create the file, but no end file so we should unregister with an error. 
fs.create(start).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -198,8 +198,8 @@ public class TestMRAppMaster { conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); conf.setInt(MRJobConfig.NUM_REDUCES, 0); conf.set(JHAdminConfig.MR_HS_JHIST_FORMAT, "json"); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); @@ -217,7 +217,7 @@ public class TestMRAppMaster { FileSystem fs = FileSystem.get(conf); JobSplitWriter.createSplitFiles(new Path(dir.getAbsolutePath()), conf, fs, new org.apache.hadoop.mapred.InputSplit[0]); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMasterTestLaunchTime appMaster = new MRAppMasterTestLaunchTime(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis()); @@ -235,8 +235,8 @@ public class TestMRAppMaster { String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); @@ -244,7 +244,7 @@ public class TestMRAppMaster { FileSystem fs = FileSystem.get(conf); fs.create(start).close(); fs.create(end).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -264,7 +264,7 @@ public class TestMRAppMaster { // verify the final status is SUCCEEDED verifyFailedStatus((MRAppMasterTest)appMaster, "SUCCEEDED"); } - + @Test public void testMRAppMasterFailLock() throws IOException, InterruptedException { @@ -273,8 +273,8 @@ public class TestMRAppMaster { String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); @@ -282,7 +282,7 @@ public class TestMRAppMaster { FileSystem fs = FileSystem.get(conf); fs.create(start).close(); fs.create(end).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -302,7 +302,7 @@ 
public class TestMRAppMaster { // verify the final status is FAILED verifyFailedStatus((MRAppMasterTest)appMaster, "FAILED"); } - + @Test public void testMRAppMasterMissingStaging() throws IOException, InterruptedException { @@ -311,16 +311,16 @@ public class TestMRAppMaster { String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); //Delete the staging directory File dir = new File(stagingDir); if(dir.exists()) { FileUtils.deleteDirectory(dir); } - - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -351,9 +351,9 @@ public class TestMRAppMaster { String containerIdStr = "container_1317529182569_0004_000002_1"; String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); @@ -425,7 +425,7 @@ public class TestMRAppMaster { new Token(identifier, password, AMRMTokenIdentifier.KIND_NAME, appTokenService); credentials.addToken(appTokenService, appToken); - + Text keyAlias = new Text("mySecretKeyAlias"); credentials.addSecretKey(keyAlias, "mySecretKey".getBytes()); Token storedToken = @@ -486,7 +486,7 @@ public class TestMRAppMaster { Assert.assertEquals(storedToken, confCredentials.getToken(tokenAlias)); Assert.assertEquals("mySecretKey", new String(confCredentials.getSecretKey(keyAlias))); - + // Verify the AM's ugi - app token should be present Credentials ugiCredentials = appMaster.getUgi().getCredentials(); Assert.assertEquals(1, ugiCredentials.numberOfSecretKeys()); @@ -505,9 +505,9 @@ public class TestMRAppMaster { String applicationAttemptIdStr = "appattempt_1317529182569_0004_000002"; String containerIdStr = "container_1317529182569_0004_000002_1"; String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); @@ -589,7 +589,7 @@ class MRAppMasterTest extends MRAppMaster { } this.conf = conf; } - + @Override protected ContainerAllocator createContainerAllocator( final ClientService clientService, final AppContext context) { @@ -626,7 +626,7 @@ class MRAppMasterTest extends MRAppMaster { public Credentials getCredentials() { return super.getCredentials(); } - + public UserGroupInformation getUgi() { return currentUser; } diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java index a553bbdab2d..e420b744353 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java @@ -129,8 +129,8 @@ public class TestCommitterEventHandler { SystemClock clock = new SystemClock(); AppContext appContext = mock(AppContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); when(appContext.getApplicationID()).thenReturn(attemptid.getApplicationId()); when(appContext.getApplicationAttemptId()).thenReturn(attemptid); when(appContext.getEventHandler()).thenReturn( @@ -240,8 +240,8 @@ public class TestCommitterEventHandler { YarnConfiguration conf = new YarnConfiguration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); JobContext mockJobContext = mock(JobContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(attemptid.getApplicationId())); @@ -288,8 +288,8 @@ public class TestCommitterEventHandler { YarnConfiguration conf = new YarnConfiguration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); JobContext mockJobContext = mock(JobContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = + ApplicationAttemptId.fromString("appattempt_1234567890000_0001_0"); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(attemptid.getApplicationId())); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java index 33c5c48ecfe..97ec0c7baef 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java @@ -941,8 +941,8 @@ public class TestJobImpl { callback.run(); } }; - ApplicationAttemptId id = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId id = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); when(appContext.getApplicationID()).thenReturn(id.getApplicationId()); when(appContext.getApplicationAttemptId()).thenReturn(id); CommitterEventHandler handler = diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java index dcd5d2954ba..3c9127fd116 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java @@ -515,7 +515,7 @@ public class TestAMWebServicesAttempts extends JerseyTest { WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag, diagnostics); WebServicesTestUtils.checkStringMatch("assignedContainerId", - ConverterUtils.toString(ta.getAssignedContainerID()), + ta.getAssignedContainerID().toString(), assignedContainerId); assertEquals("startTime wrong", ta.getLaunchTime(), startTime); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java index 8606ede816c..a7ca9889b04 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java @@ -158,7 +158,7 @@ class LocalDistributedCacheManager { } Path resourcePath; try { - resourcePath = ConverterUtils.getPathFromYarnURL(resource.getResource()); + resourcePath = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException(e); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java index feea789efcc..8ca1a9d3f9a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java @@ -68,6 +68,7 @@ import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.util.Apps; @@ -608,8 +609,7 @@ public class MRApps extends Apps { } String linkName = name.toUri().getPath(); LocalResource orig = localResources.get(linkName); - org.apache.hadoop.yarn.api.records.URL url = - ConverterUtils.getYarnUrlFromURI(p.toUri()); + URL url = URL.fromURI(p.toUri()); if(orig != null && !orig.getResource().equals(url)) { LOG.warn( getResourceDescription(orig.getType()) 
+ @@ -618,8 +618,8 @@ public class MRApps extends Apps { " This will be an error in Hadoop 2.0"); continue; } - localResources.put(linkName, LocalResource.newInstance(ConverterUtils - .getYarnUrlFromURI(p.toUri()), type, visibilities[i] + localResources.put(linkName, LocalResource + .newInstance(URL.fromURI(p.toUri()), type, visibilities[i] ? LocalResourceVisibility.PUBLIC : LocalResourceVisibility.PRIVATE, sizes[i], timestamps[i])); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java index ea2ca9e90fd..266aa94f0cb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java @@ -107,8 +107,8 @@ public class AMStartedEvent implements HistoryEvent { * @return the ApplicationAttemptId */ public ApplicationAttemptId getAppAttemptId() { - return ConverterUtils.toApplicationAttemptId(datum.getApplicationAttemptId() - .toString()); + return ApplicationAttemptId.fromString( + datum.getApplicationAttemptId().toString()); } /** @@ -122,7 +122,7 @@ public class AMStartedEvent implements HistoryEvent { * @return the ContainerId for the MRAppMaster. */ public ContainerId getContainerId() { - return ConverterUtils.toContainerId(datum.getContainerId().toString()); + return ContainerId.fromString(datum.getContainerId().toString()); } /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java index c8c250a6078..3073d5b95f1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java @@ -75,7 +75,7 @@ public class TaskAttemptStartedEvent implements HistoryEvent { long startTime, String trackerName, int httpPort, int shufflePort, String locality, String avataar) { this(attemptId, taskType, startTime, trackerName, httpPort, shufflePort, - ConverterUtils.toContainerId("container_-1_-1_-1_-1"), locality, + ContainerId.fromString("container_-1_-1_-1_-1"), locality, avataar); } @@ -116,7 +116,7 @@ public class TaskAttemptStartedEvent implements HistoryEvent { } /** Get the ContainerId */ public ContainerId getContainerId() { - return ConverterUtils.toContainerId(datum.getContainerId().toString()); + return ContainerId.fromString(datum.getContainerId().toString()); } /** Get the locality */ public String getLocality() { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java index 60dc235d684..54c2792b12b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java @@ -533,7 +533,7 @@ public class TestHsWebServicesAttempts extends JerseyTest { WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag, diagnostics); WebServicesTestUtils.checkStringMatch("assignedContainerId", - ConverterUtils.toString(ta.getAssignedContainerID()), + ta.getAssignedContainerID().toString(), assignedContainerId); assertEquals("startTime wrong", ta.getLaunchTime(), startTime); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java index 1342282784f..b30641ebb22 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java @@ -321,7 +321,7 @@ public class YARNRunner implements ClientProtocol { throws IOException { LocalResource rsrc = recordFactory.newRecordInstance(LocalResource.class); FileStatus rsrcStat = fs.getFileStatus(p); - rsrc.setResource(ConverterUtils.getYarnUrlFromPath(fs + rsrc.setResource(URL.fromPath(fs .getDefaultFileSystem().resolvePath(rsrcStat.getPath()))); rsrc.setSize(rsrcStat.getLen()); rsrc.setTimestamp(rsrcStat.getModificationTime()); @@ -355,8 +355,7 @@ public class YARNRunner implements ClientProtocol { Path jobConfPath = new Path(jobSubmitDir, MRJobConfig.JOB_CONF_FILE); - URL yarnUrlForJobSubmitDir = ConverterUtils - .getYarnUrlFromPath(defaultFileContext.getDefaultFileSystem() + URL yarnUrlForJobSubmitDir = URL.fromPath(defaultFileContext.getDefaultFileSystem() .resolvePath( defaultFileContext.makeQualified(new Path(jobSubmitDir)))); LOG.debug("Creating setup context, jobSubmitDir url is " diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java index 6c85d80d72f..9a4eb67812f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java @@ -758,7 +758,7 @@ public class TestMRJobs { boolean foundAppMaster = job.isUber(); final Path containerPathComponent = slog.getPath().getParent(); if (!foundAppMaster) { - final ContainerId cid = ConverterUtils.toContainerId( + final ContainerId cid = ContainerId.fromString( containerPathComponent.getName()); foundAppMaster = ((cid.getContainerId() & ContainerId.CONTAINER_ID_BITMASK)== 1); diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java index 2fb7811080b..286a8954041 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java @@ -999,7 +999,7 @@ public class ShuffleHandler extends AuxiliaryService { final String baseStr = ContainerLocalizer.USERCACHE + "/" + user + "/" + ContainerLocalizer.APPCACHE + "/" - + ConverterUtils.toString(appID) + "/output" + "/"; + + appID.toString() + "/output" + "/"; return baseStr; } diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java index 6b8af97e9c4..2d3e43b1ff6 100644 --- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java +++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java @@ -39,6 +39,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.LogAggregationStatus; import org.apache.hadoop.yarn.applications.distributedshell.ApplicationMaster; @@ -302,7 +303,7 @@ public class HadoopArchiveLogs implements Tool { AppInfo app = it.next(); try { ApplicationReport report = client.getApplicationReport( - ConverterUtils.toApplicationId(app.getAppId())); + ApplicationId.fromString(app.getAppId())); LogAggregationStatus aggStatus = report.getLogAggregationStatus(); if (aggStatus.equals(LogAggregationStatus.RUNNING) || aggStatus.equals(LogAggregationStatus.RUNNING_WITH_FAILURE) || diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java index 0a83bc047aa..5f3a68ebe1a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java @@ -19,6 +19,8 @@ package org.apache.hadoop.yarn.api.records; import java.text.NumberFormat; +import java.util.Iterator; +import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -26,6 +28,8 @@ import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.util.Records; +import com.google.common.base.Splitter; + /** *

ApplicationAttemptId denotes the particular attempt of an ApplicationMaster for a given {@link ApplicationId}.

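The hunk that follows adds an ApplicationAttemptId.fromString factory so callers no longer need the deprecated ConverterUtils.toApplicationAttemptId. A minimal usage sketch, assuming the appattempt_<clusterTimestamp>_<appId>_<attemptNumber> string form used in this patch (the sample id mirrors one used in TestMRAppMaster above; the class and variable names are illustrative):

    import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
    import org.apache.hadoop.yarn.api.records.ApplicationId;

    public class AppAttemptIdParseSketch {
      public static void main(String[] args) {
        // Previously: ConverterUtils.toApplicationAttemptId(idStr) -- deprecated by this patch.
        String idStr = "appattempt_1317529182569_0004_000001";
        ApplicationAttemptId attemptId = ApplicationAttemptId.fromString(idStr);

        // The parsed attempt carries its parent ApplicationId and attempt number.
        ApplicationId appId = attemptId.getApplicationId();
        System.out.println(appId + " attempt #" + attemptId.getAttemptId());

        // Strings with the wrong prefix are rejected with IllegalArgumentException.
        try {
          ApplicationAttemptId.fromString("container_1317529182569_0004_000001_1");
        } catch (IllegalArgumentException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }
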
@@ -38,10 +42,11 @@ import org.apache.hadoop.yarn.util.Records; @Stable public abstract class ApplicationAttemptId implements Comparable { + private static Splitter _spliter = Splitter.on('_').trimResults(); @Private @Unstable - public static final String appAttemptIdStrPrefix = "appattempt_"; + public static final String appAttemptIdStrPrefix = "appattempt"; @Public @Unstable @@ -131,6 +136,7 @@ public abstract class ApplicationAttemptId implements @Override public String toString() { StringBuilder sb = new StringBuilder(appAttemptIdStrPrefix); + sb.append("_"); sb.append(this.getApplicationId().getClusterTimestamp()).append("_"); sb.append(ApplicationId.appIdFormat.get().format( this.getApplicationId().getId())); @@ -139,4 +145,32 @@ public abstract class ApplicationAttemptId implements } protected abstract void build(); + + @Public + @Stable + public static ApplicationAttemptId fromString(String applicationAttemptIdStr) { + Iterator it = _spliter.split(applicationAttemptIdStr).iterator(); + if (!it.next().equals(appAttemptIdStrPrefix)) { + throw new IllegalArgumentException("Invalid AppAttemptId prefix: " + + applicationAttemptIdStr); + } + try { + return toApplicationAttemptId(it); + } catch (NumberFormatException n) { + throw new IllegalArgumentException("Invalid AppAttemptId: " + + applicationAttemptIdStr, n); + } catch (NoSuchElementException e) { + throw new IllegalArgumentException("Invalid AppAttemptId: " + + applicationAttemptIdStr, e); + } + } + + private static ApplicationAttemptId toApplicationAttemptId( + Iterator it) throws NumberFormatException { + ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), + Integer.parseInt(it.next())); + ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(appId, Integer.parseInt(it.next())); + return appAttemptId; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java index 90214cd8fca..03a77ce309f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java @@ -19,6 +19,8 @@ package org.apache.hadoop.yarn.api.records; import java.text.NumberFormat; +import java.util.Iterator; +import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -26,6 +28,8 @@ import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.util.Records; +import com.google.common.base.Splitter; + /** *

ApplicationId represents the globally unique identifier for an application.

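The next hunk moves ApplicationId parsing onto the record class as well, keeping the application_<clusterTimestamp>_<sequence> form produced by toString. A small round-trip sketch under that assumption (class name illustrative):

    import org.apache.hadoop.yarn.api.records.ApplicationId;

    public class AppIdRoundTripSketch {
      public static void main(String[] args) {
        // Build an id programmatically, then round-trip it through the string form.
        ApplicationId original = ApplicationId.newInstance(1317529182569L, 4);
        String asString = original.toString();   // "application_1317529182569_0004"

        // Previously: ConverterUtils.toApplicationId(asString) -- deprecated by this patch.
        ApplicationId parsed = ApplicationId.fromString(asString);

        // Cluster timestamp and sequence number survive the round trip.
        System.out.println(parsed.getClusterTimestamp() == original.getClusterTimestamp());
        System.out.println(parsed.getId() == original.getId());
        System.out.println(parsed.equals(original));
      }
    }
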
@@ -38,10 +42,11 @@ import org.apache.hadoop.yarn.util.Records; @Public @Stable public abstract class ApplicationId implements Comparable { + private static Splitter _spliter = Splitter.on('_').trimResults(); @Private @Unstable - public static final String appIdStrPrefix = "application_"; + public static final String appIdStrPrefix = "application"; @Public @Unstable @@ -105,8 +110,35 @@ public abstract class ApplicationId implements Comparable { @Override public String toString() { - return appIdStrPrefix + this.getClusterTimestamp() + "_" - + appIdFormat.get().format(getId()); + return appIdStrPrefix + "_" + this.getClusterTimestamp() + "_" + appIdFormat + .get().format(getId()); + } + + private static ApplicationId toApplicationId( + Iterator it) throws NumberFormatException { + ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), + Integer.parseInt(it.next())); + return appId; + } + + @Public + @Stable + public static ApplicationId fromString(String appIdStr) { + Iterator it = _spliter.split((appIdStr)).iterator(); + if (!it.next().equals(appIdStrPrefix)) { + throw new IllegalArgumentException("Invalid ApplicationId prefix: " + + appIdStr + ". The valid ApplicationId should start with prefix " + + appIdStrPrefix); + } + try { + return toApplicationId(it); + } catch (NumberFormatException n) { + throw new IllegalArgumentException("Invalid ApplicationId: " + + appIdStr, n); + } catch (NoSuchElementException e) { + throw new IllegalArgumentException("Invalid ApplicationId: " + + appIdStr, e); + } } @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java index f332651daf2..feddeca9e70 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java @@ -42,7 +42,7 @@ public abstract class ContainerId implements Comparable{ private static final String CONTAINER_PREFIX = "container"; private static final String EPOCH_PREFIX = "e"; - @Private + @Public @Unstable public static ContainerId newContainerId(ApplicationAttemptId appAttemptId, long containerId) { @@ -97,7 +97,7 @@ public abstract class ContainerId implements Comparable{ */ @Public @Deprecated - @Stable + @Unstable public abstract int getId(); /** @@ -205,7 +205,7 @@ public abstract class ContainerId implements Comparable{ } @Public - @Unstable + @Stable public static ContainerId fromString(String containerIdStr) { Iterator it = _SPLITTER.split(containerIdStr).iterator(); if (!it.next().equals(CONTAINER_PREFIX)) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java index c3f859598f3..a0b87a7be62 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java @@ -20,8 +20,8 @@ package org.apache.hadoop.yarn.api.records; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; -import 
org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.yarn.util.Records; /** @@ -35,8 +35,8 @@ import org.apache.hadoop.yarn.util.Records; @Stable public abstract class NodeId implements Comparable { - @Private - @Unstable + @Public + @Stable public static NodeId newInstance(String host, int port) { NodeId nodeId = Records.newRecord(NodeId.class); nodeId.setHost(host); @@ -112,6 +112,23 @@ public abstract class NodeId implements Comparable { } return hostCompare; } + + @Public + @Stable + public static NodeId fromString(String nodeIdStr) { + String[] parts = nodeIdStr.split(":"); + if (parts.length != 2) { + throw new IllegalArgumentException("Invalid NodeId [" + nodeIdStr + + "]. Expected host:port"); + } + try { + NodeId nodeId = + NodeId.newInstance(parts[0].trim(), Integer.parseInt(parts[1])); + return nodeId; + } catch (NumberFormatException e) { + throw new IllegalArgumentException("Invalid port: " + parts[1], e); + } + } protected abstract void build(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java index 4261117b108..aa28585ab17 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java @@ -18,8 +18,13 @@ package org.apache.hadoop.yarn.api.records; +import java.net.URI; +import java.net.URISyntaxException; + import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Stable; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.util.Records; /** @@ -119,4 +124,48 @@ public abstract class URL { @Public @Stable public abstract void setFile(String file); + + @Public + @Stable + public Path toPath() throws URISyntaxException { + String scheme = getScheme() == null ? 
"" : getScheme(); + + String authority = ""; + if (getHost() != null) { + authority = getHost(); + if (getUserInfo() != null) { + authority = getUserInfo() + "@" + authority; + } + if (getPort() > 0) { + authority += ":" + getPort(); + } + } + + return new Path( + (new URI(scheme, authority, getFile(), null, null)).normalize()); + } + + @Public + @Stable + public static URL fromURI(URI uri) { + URL url = + RecordFactoryProvider.getRecordFactory(null).newRecordInstance( + URL.class); + if (uri.getHost() != null) { + url.setHost(uri.getHost()); + } + if (uri.getUserInfo() != null) { + url.setUserInfo(uri.getUserInfo()); + } + url.setPort(uri.getPort()); + url.setScheme(uri.getScheme()); + url.setFile(uri.getPath()); + return url; + } + + @Public + @Stable + public static URL fromPath(Path path) { + return fromURI(path.toUri()); + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java index 0c670df7da4..05e6edc6ec0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java @@ -412,13 +412,13 @@ public class ApplicationMaster { if (!envs.containsKey(Environment.CONTAINER_ID.name())) { if (cliParser.hasOption("app_attempt_id")) { String appIdStr = cliParser.getOptionValue("app_attempt_id", ""); - appAttemptID = ConverterUtils.toApplicationAttemptId(appIdStr); + appAttemptID = ApplicationAttemptId.fromString(appIdStr); } else { throw new IllegalArgumentException( "Application Attempt Id not set in the environment"); } } else { - ContainerId containerId = ConverterUtils.toContainerId(envs + ContainerId containerId = ContainerId.fromString(envs .get(Environment.CONTAINER_ID.name())); appAttemptID = containerId.getApplicationAttemptId(); } @@ -1010,8 +1010,7 @@ public class ApplicationMaster { URL yarnUrl = null; try { - yarnUrl = ConverterUtils.getYarnUrlFromURI( - new URI(renamedScriptPath.toString())); + yarnUrl = URL.fromURI(new URI(renamedScriptPath.toString())); } catch (URISyntaxException e) { LOG.error("Error when trying to use shell script path specified" + " in env, path=" + renamedScriptPath, e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java index c6327949781..f349e99cece 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java @@ -68,6 +68,7 @@ import 
org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain; @@ -822,7 +823,7 @@ public class Client { FileStatus scFileStatus = fs.getFileStatus(dst); LocalResource scRsrc = LocalResource.newInstance( - ConverterUtils.getYarnUrlFromURI(dst.toUri()), + URL.fromURI(dst.toUri()), LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime()); localResources.put(fileDstPath, scRsrc); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java index 55fbd60b188..119fa6f3bd5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java @@ -53,7 +53,7 @@ public class DistributedShellTimelinePlugin extends TimelineEntityGroupPlugin { public Set getTimelineEntityGroupId(String entityId, String entityType) { if (ApplicationMaster.DSEntity.DS_CONTAINER.toString().equals(entityId)) { - ContainerId containerId = ConverterUtils.toContainerId(entityId); + ContainerId containerId = ContainerId.fromString(entityId); ApplicationId appId = containerId.getApplicationAttemptId() .getApplicationId(); return toEntityGroupId(appId.toString()); @@ -69,7 +69,7 @@ public class DistributedShellTimelinePlugin extends TimelineEntityGroupPlugin { } private Set toEntityGroupId(String strAppId) { - ApplicationId appId = ConverterUtils.toApplicationId(strAppId); + ApplicationId appId = ApplicationId.fromString(strAppId); TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance( appId, ApplicationMaster.CONTAINER_ENTITY_GROUP_ID); Set result = new HashSet<>(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java index 2b46fca4b45..9448cf14bc3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java @@ -371,8 +371,8 @@ public class TestDistributedShell { } String currAttemptEntityId = 
entitiesAttempts.getEntities().get(0).getEntityId(); - ApplicationAttemptId attemptId - = ConverterUtils.toApplicationAttemptId(currAttemptEntityId); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString( + currAttemptEntityId); NameValuePair primaryFilter = new NameValuePair( ApplicationMaster.APPID_TIMELINE_FILTER_NAME, attemptId.getApplicationId().toString()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java index 786e5819f75..e66fbd7d92f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java @@ -301,7 +301,7 @@ public class ApplicationCLI extends YarnCLI { */ private void signalToContainer(String containerIdStr, SignalContainerCommand command) throws YarnException, IOException { - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); sysout.println("Signalling container " + containerIdStr); client.signalToContainer(containerId, command); } @@ -327,8 +327,8 @@ public class ApplicationCLI extends YarnCLI { throws YarnException, IOException { ApplicationAttemptReport appAttemptReport = null; try { - appAttemptReport = client.getApplicationAttemptReport(ConverterUtils - .toApplicationAttemptId(applicationAttemptId)); + appAttemptReport = client.getApplicationAttemptReport( + ApplicationAttemptId.fromString(applicationAttemptId)); } catch (ApplicationNotFoundException e) { sysout.println("Application for AppAttempt with id '" + applicationAttemptId + "' doesn't exist in RM or Timeline Server."); @@ -384,8 +384,7 @@ public class ApplicationCLI extends YarnCLI { IOException { ContainerReport containerReport = null; try { - containerReport = client.getContainerReport((ConverterUtils - .toContainerId(containerId))); + containerReport = client.getContainerReport(ContainerId.fromString(containerId)); } catch (ApplicationNotFoundException e) { sysout.println("Application for Container with id '" + containerId + "' doesn't exist in RM or Timeline Server."); @@ -515,7 +514,7 @@ public class ApplicationCLI extends YarnCLI { */ private void killApplication(String applicationId) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); ApplicationReport appReport = null; try { appReport = client.getApplicationReport(appId); @@ -540,7 +539,7 @@ public class ApplicationCLI extends YarnCLI { */ private void moveApplicationAcrossQueues(String applicationId, String queue) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); ApplicationReport appReport = client.getApplicationReport(appId); if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED || appReport.getYarnApplicationState() == YarnApplicationState.KILLED @@ -565,7 +564,7 @@ public class ApplicationCLI extends YarnCLI { IOException { ApplicationId appId; ApplicationAttemptId attId; - attId = ConverterUtils.toApplicationAttemptId(attemptId); + attId = ApplicationAttemptId.fromString(attemptId); appId = 
attId.getApplicationId(); sysout.println("Failing attempt " + attId + " of application " + appId); @@ -583,8 +582,8 @@ public class ApplicationCLI extends YarnCLI { throws YarnException, IOException { ApplicationReport appReport = null; try { - appReport = client.getApplicationReport(ConverterUtils - .toApplicationId(applicationId)); + appReport = client.getApplicationReport( + ApplicationId.fromString(applicationId)); } catch (ApplicationNotFoundException e) { sysout.println("Application with id '" + applicationId + "' doesn't exist in RM or Timeline Server."); @@ -684,7 +683,7 @@ public class ApplicationCLI extends YarnCLI { new OutputStreamWriter(sysout, Charset.forName("UTF-8"))); List appAttemptsReport = client - .getApplicationAttempts(ConverterUtils.toApplicationId(applicationId)); + .getApplicationAttempts(ApplicationId.fromString(applicationId)); writer.println("Total number of application attempts " + ":" + appAttemptsReport.size()); writer.printf(APPLICATION_ATTEMPTS_PATTERN, "ApplicationAttempt-Id", @@ -711,8 +710,8 @@ public class ApplicationCLI extends YarnCLI { PrintWriter writer = new PrintWriter( new OutputStreamWriter(sysout, Charset.forName("UTF-8"))); - List appsReport = client - .getContainers(ConverterUtils.toApplicationAttemptId(appAttemptId)); + List appsReport = client.getContainers( + ApplicationAttemptId.fromString(appAttemptId)); writer.println("Total number of containers " + ":" + appsReport.size()); writer.printf(CONTAINER_PATTERN, "Container-Id", "Start Time", "Finish Time", "State", "Host", "Node Http Address", "LOG-URL"); @@ -735,7 +734,7 @@ public class ApplicationCLI extends YarnCLI { */ private void updateApplicationPriority(String applicationId, String priority) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); Priority newAppPriority = Priority.newInstance(Integer.parseInt(priority)); sysout.println("Updating priority of an application " + applicationId); client.updateApplicationPriority(appId, newAppPriority); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java index 5a6a8a511eb..94cb1662135 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java @@ -45,6 +45,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Tool; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerReport; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.api.YarnClient; @@ -199,7 +200,7 @@ public class LogsCLI extends Configured implements Tool { ApplicationId appId = null; try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (Exception e) { System.err.println("Invalid ApplicationId specified"); return -1; @@ -523,8 +524,8 @@ public class LogsCLI extends Configured implements Tool { throws YarnException, IOException { YarnClient yarnClient = createYarnClient(); try { - return 
yarnClient.getContainerReport(ConverterUtils - .toContainerId(containerIdStr)); + return yarnClient.getContainerReport( + ContainerId.fromString(containerIdStr)); } finally { yarnClient.close(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java index a89551f9c51..f51fee929cf 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java @@ -243,7 +243,7 @@ public class NodeCLI extends YarnCLI { */ private void printNodeStatus(String nodeIdStr) throws YarnException, IOException { - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); List nodesReport = client.getNodeReports(); // Use PrintWriter.println, which uses correct platform line ending. ByteArrayOutputStream baos = new ByteArrayOutputStream(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java index d407c206f5b..aa7fc30344b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java @@ -427,7 +427,7 @@ public class RMAdminCLI extends HAAdmin { ResourceManagerAdministrationProtocol adminProtocol = createAdminProtocol(); UpdateNodeResourceRequest request = recordFactory.newRecordInstance(UpdateNodeResourceRequest.class); - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); Resource resource = Resources.createResource(memSize, cores); Map resourceMap = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java index 057594d5bb2..15513338391 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java @@ -222,7 +222,7 @@ public class TestRMAdminCLI { verify(admin).updateNodeResource(argument.capture()); UpdateNodeResourceRequest request = argument.getValue(); Map resourceMap = request.getNodeResourceMap(); - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); Resource expectedResource = Resources.createResource(memSize, cores); ResourceOption resource = resourceMap.get(nodeId); assertNotNull("resource for " + nodeIdStr + " shouldn't be null.", diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java index 4c1d152ccd5..a80f9d7629e 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java @@ -99,7 +99,7 @@ public class AggregatedLogDeletionService extends AbstractService { if(appDir.isDirectory() && appDir.getModificationTime() < cutoffMillis) { boolean appTerminated = - isApplicationTerminated(ConverterUtils.toApplicationId(appDir + isApplicationTerminated(ApplicationId.fromString(appDir .getPath().getName()), rmClient); if(appTerminated && shouldDeleteLogDir(appDir, cutoffMillis, fs)) { try { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java index c9453b31473..f5dbc9272d8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java @@ -193,14 +193,11 @@ public class AggregatedLogFormat { private Set getPendingLogFilesToUploadForThisContainer() { Set pendingUploadFiles = new HashSet(); for (String rootLogDir : this.rootLogDirs) { - File appLogDir = - new File(rootLogDir, - ConverterUtils.toString( - this.containerId.getApplicationAttemptId(). - getApplicationId()) - ); + File appLogDir = new File(rootLogDir, + this.containerId.getApplicationAttemptId(). + getApplicationId().toString()); File containerLogDir = - new File(appLogDir, ConverterUtils.toString(this.containerId)); + new File(appLogDir, this.containerId.toString()); if (!containerLogDir.isDirectory()) { continue; // ContainerDir may have been deleted by the user. 
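Illustrative sketch, not part of the patch: the hunks before and after this point all apply the same mechanical substitution, so a minimal standalone Java example of the new call sites is given here for reviewers. The container id, host, and path strings are made-up values; only methods that actually appear in this patch (ContainerId.fromString, ApplicationId/ApplicationAttemptId/NodeId.fromString, URL.fromPath, URL#toPath) are used.

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.ContainerId;
    import org.apache.hadoop.yarn.api.records.NodeId;
    import org.apache.hadoop.yarn.api.records.URL;

    public class ConverterUtilsMigrationSketch {
      public static void main(String[] args) throws Exception {
        // Was: ConverterUtils.toContainerId(str); now the record parses itself.
        ContainerId cid =
            ContainerId.fromString("container_1423221031460_0003_01_000001");

        // Was: ConverterUtils.toString(appId); now plain toString() on the record.
        ApplicationId appId = cid.getApplicationAttemptId().getApplicationId();
        System.out.println(appId.toString());

        // Was: ConverterUtils.toNodeId(str); now NodeId.fromString(str).
        NodeId nodeId = NodeId.fromString("host.example.com:45454");
        System.out.println(nodeId);

        // Was: ConverterUtils.getYarnUrlFromPath(p) / getPathFromYarnURL(url);
        // now URL.fromPath(p) and url.toPath() round-trip a Hadoop Path.
        Path p = new Path("hdfs://nn.example.com:8020/tmp/resource.jar");
        URL yarnUrl = URL.fromPath(p);
        Path back = yarnUrl.toPath();   // may throw URISyntaxException
        System.out.println(back.equals(p));
      }
    }

The deprecated ConverterUtils entry points remain as thin wrappers over these record methods, so existing callers keep compiling while new code targets the record classes directly.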
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java index fb4d3cd943d..93377924384 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java @@ -62,7 +62,7 @@ public class LogCLIHelpers implements Configurable { YarnConfiguration.NM_REMOTE_APP_LOG_DIR, YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR)); String suffix = LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf()); - ApplicationId applicationId = ConverterUtils.toApplicationId(appId); + ApplicationId applicationId = ApplicationId.fromString(appId); Path remoteAppLogDir = LogAggregationUtils.getRemoteAppLogDir( remoteRootLogDir, applicationId, jobOwner, suffix); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java index acd29fb02d6..67bc2b74fd1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java @@ -18,18 +18,13 @@ package org.apache.hadoop.yarn.util; -import static org.apache.hadoop.yarn.util.StringHelper._split; - import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.Map.Entry; -import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.SecurityUtil; @@ -41,7 +36,6 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; /** @@ -49,7 +43,7 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; * from/to 'serializableFormat' to/from hadoop/nativejava data structures. * */ -@Private +@Public public class ConverterUtils { public static final String APPLICATION_PREFIX = "application"; @@ -58,174 +52,114 @@ public class ConverterUtils { /** * return a hadoop path from a given url + * This method is deprecated, use {@link URL#toPath()} instead. * * @param url * url to convert * @return path from {@link URL} * @throws URISyntaxException */ + @Public + @Deprecated public static Path getPathFromYarnURL(URL url) throws URISyntaxException { - String scheme = url.getScheme() == null ? 
"" : url.getScheme(); - - String authority = ""; - if (url.getHost() != null) { - authority = url.getHost(); - if (url.getUserInfo() != null) { - authority = url.getUserInfo() + "@" + authority; - } - if (url.getPort() > 0) { - authority += ":" + url.getPort(); - } - } - - return new Path( - (new URI(scheme, authority, url.getFile(), null, null)).normalize()); + return url.toPath(); } - - /** - * change from CharSequence to string for map key and value - * @param env map for converting - * @return string,string map + + /* + * This method is deprecated, use {@link URL#fromPath(Path)} instead. */ - public static Map convertToString( - Map env) { - - Map stringMap = new HashMap(); - for (Entry entry: env.entrySet()) { - stringMap.put(entry.getKey().toString(), entry.getValue().toString()); - } - return stringMap; - } - + @Public + @Deprecated public static URL getYarnUrlFromPath(Path path) { - return getYarnUrlFromURI(path.toUri()); + return URL.fromPath(path); } + /* + * This method is deprecated, use {@link URL#fromURI(URI)} instead. + */ + @Public + @Deprecated public static URL getYarnUrlFromURI(URI uri) { - URL url = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(URL.class); - if (uri.getHost() != null) { - url.setHost(uri.getHost()); - } - if (uri.getUserInfo() != null) { - url.setUserInfo(uri.getUserInfo()); - } - url.setPort(uri.getPort()); - url.setScheme(uri.getScheme()); - url.setFile(uri.getPath()); - return url; + return URL.fromURI(uri); } + /* + * This method is deprecated, use {@link ApplicationId#toString()} instead. + */ + @Public + @Deprecated public static String toString(ApplicationId appId) { return appId.toString(); } + /* + * This method is deprecated, use {@link ApplicationId#fromString(String)} + * instead. + */ + @Public + @Deprecated public static ApplicationId toApplicationId(RecordFactory recordFactory, - String appIdStr) { - Iterator it = _split(appIdStr).iterator(); - if (!it.next().equals(APPLICATION_PREFIX)) { - throw new IllegalArgumentException("Invalid ApplicationId prefix: " - + appIdStr + ". The valid ApplicationId should start with prefix " - + APPLICATION_PREFIX); - } - try { - return toApplicationId(recordFactory, it); - } catch (NumberFormatException n) { - throw new IllegalArgumentException("Invalid ApplicationId: " + appIdStr, - n); - } catch (NoSuchElementException e) { - throw new IllegalArgumentException("Invalid ApplicationId: " + appIdStr, - e); - } - } - - private static ApplicationId toApplicationId(RecordFactory recordFactory, - Iterator it) { - ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), - Integer.parseInt(it.next())); - return appId; - } - - private static ApplicationAttemptId toApplicationAttemptId( - Iterator it) throws NumberFormatException { - ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), - Integer.parseInt(it.next())); - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, Integer.parseInt(it.next())); - return appAttemptId; - } - - private static ApplicationId toApplicationId( - Iterator it) throws NumberFormatException { - ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), - Integer.parseInt(it.next())); - return appId; + String applicationIdStr) { + return ApplicationId.fromString(applicationIdStr); } + /* + * This method is deprecated, use {@link ContainerId#toString()} instead. + */ + @Public + @Deprecated public static String toString(ContainerId cId) { return cId == null ? 
null : cId.toString(); } - + + @Private + @InterfaceStability.Unstable public static NodeId toNodeIdWithDefaultPort(String nodeIdStr) { if (nodeIdStr.indexOf(":") < 0) { - return toNodeId(nodeIdStr + ":0"); + return NodeId.fromString(nodeIdStr + ":0"); } - return toNodeId(nodeIdStr); + return NodeId.fromString(nodeIdStr); } + /* + * This method is deprecated, use {@link NodeId#fromString(String)} instead. + */ + @Public + @Deprecated public static NodeId toNodeId(String nodeIdStr) { - String[] parts = nodeIdStr.split(":"); - if (parts.length != 2) { - throw new IllegalArgumentException("Invalid NodeId [" + nodeIdStr - + "]. Expected host:port"); - } - try { - NodeId nodeId = - NodeId.newInstance(parts[0].trim(), Integer.parseInt(parts[1])); - return nodeId; - } catch (NumberFormatException e) { - throw new IllegalArgumentException("Invalid port: " + parts[1], e); - } + return NodeId.fromString(nodeIdStr); } + /* + * This method is deprecated, use {@link ContainerId#fromString(String)} + * instead. + */ + @Public + @Deprecated public static ContainerId toContainerId(String containerIdStr) { return ContainerId.fromString(containerIdStr); } - + + /* + * This method is deprecated, use {@link ApplicationAttemptId#toString()} + * instead. + */ + @Public + @Deprecated public static ApplicationAttemptId toApplicationAttemptId( - String applicationAttmeptIdStr) { - Iterator it = _split(applicationAttmeptIdStr).iterator(); - if (!it.next().equals(APPLICATION_ATTEMPT_PREFIX)) { - throw new IllegalArgumentException("Invalid AppAttemptId prefix: " - + applicationAttmeptIdStr); - } - try { - return toApplicationAttemptId(it); - } catch (NumberFormatException n) { - throw new IllegalArgumentException("Invalid AppAttemptId: " - + applicationAttmeptIdStr, n); - } catch (NoSuchElementException e) { - throw new IllegalArgumentException("Invalid AppAttemptId: " - + applicationAttmeptIdStr, e); - } + String applicationAttemptIdStr) { + return ApplicationAttemptId.fromString(applicationAttemptIdStr); } + /* + * This method is deprecated, use {@link ApplicationId#fromString(String)} + * instead. + */ + @Public + @Deprecated public static ApplicationId toApplicationId( String appIdStr) { - Iterator it = _split(appIdStr).iterator(); - if (!it.next().equals(APPLICATION_PREFIX)) { - throw new IllegalArgumentException("Invalid ApplicationId prefix: " - + appIdStr + ". 
The valid ApplicationId should start with prefix " - + APPLICATION_PREFIX); - } - try { - return toApplicationId(it); - } catch (NumberFormatException n) { - throw new IllegalArgumentException("Invalid ApplicationId: " - + appIdStr, n); - } catch (NoSuchElementException e) { - throw new IllegalArgumentException("Invalid ApplicationId: " - + appIdStr, e); - } + return ApplicationId.fromString(appIdStr); } /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java index bd9c907418e..de18dc63d5a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java @@ -346,7 +346,7 @@ public class FSDownload implements Callable { public Path call() throws Exception { final Path sCopy; try { - sCopy = ConverterUtils.getPathFromYarnURL(resource.getResource()); + sCopy = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException("Invalid resource", e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java index 69fc347b1d8..91665229468 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java @@ -279,7 +279,7 @@ public class AggregatedLogsBlock extends HtmlBlock { } ContainerId containerId = null; try { - containerId = ConverterUtils.toContainerId(containerIdStr); + containerId = ContainerId.fromString(containerIdStr); } catch (IllegalArgumentException e) { html.h1() ._("Cannot get container logs for invalid containerId: " @@ -297,7 +297,7 @@ public class AggregatedLogsBlock extends HtmlBlock { } NodeId nodeId = null; try { - nodeId = ConverterUtils.toNodeId(nodeIdStr); + nodeId = NodeId.fromString(nodeIdStr); } catch (IllegalArgumentException e) { html.h1()._("Cannot get container logs. 
Invalid nodeId: " + nodeIdStr) ._(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java index faf4a774447..d8c0b75351c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java @@ -391,7 +391,7 @@ public class WebAppUtils { } ApplicationId aid = null; try { - aid = ConverterUtils.toApplicationId(recordFactory, appId); + aid = ApplicationId.fromString(appId); } catch (Exception e) { throw new BadRequestException(e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java index 3cec38b060c..077558b96a6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java @@ -34,55 +34,56 @@ public class TestConverterUtils { @Test public void testConvertUrlWithNoPort() throws URISyntaxException { Path expectedPath = new Path("hdfs://foo.com"); - URL url = ConverterUtils.getYarnUrlFromPath(expectedPath); - Path actualPath = ConverterUtils.getPathFromYarnURL(url); + URL url = URL.fromPath(expectedPath); + Path actualPath = url.toPath(); assertEquals(expectedPath, actualPath); } @Test public void testConvertUrlWithUserinfo() throws URISyntaxException { Path expectedPath = new Path("foo://username:password@example.com:8042"); - URL url = ConverterUtils.getYarnUrlFromPath(expectedPath); - Path actualPath = ConverterUtils.getPathFromYarnURL(url); + URL url = URL.fromPath(expectedPath); + Path actualPath = url.toPath(); assertEquals(expectedPath, actualPath); } @Test public void testContainerId() throws URISyntaxException { ContainerId id = TestContainerId.newContainerId(0, 0, 0, 0); - String cid = ConverterUtils.toString(id); + String cid = id.toString(); assertEquals("container_0_0000_00_000000", cid); - ContainerId gen = ConverterUtils.toContainerId(cid); + ContainerId gen = ContainerId.fromString(cid); assertEquals(gen, id); } @Test public void testContainerIdWithEpoch() throws URISyntaxException { ContainerId id = TestContainerId.newContainerId(0, 0, 0, 25645811); - String cid = ConverterUtils.toString(id); + String cid = id.toString(); assertEquals("container_0_0000_00_25645811", cid); - ContainerId gen = ConverterUtils.toContainerId(cid); + ContainerId gen = ContainerId.fromString(cid); assertEquals(gen.toString(), id.toString()); long ts = System.currentTimeMillis(); ContainerId id2 = TestContainerId.newContainerId(36473, 4365472, ts, 4298334883325L); - String cid2 = ConverterUtils.toString(id2); + String cid2 = id2.toString(); assertEquals( "container_e03_" + ts + "_36473_4365472_999799999997", cid2); - ContainerId gen2 = ConverterUtils.toContainerId(cid2); + ContainerId gen2 = ContainerId.fromString(cid2); assertEquals(gen2.toString(), id2.toString()); ContainerId id3 = TestContainerId.newContainerId(36473, 4365472, ts, 844424930131965L); - String cid3 = ConverterUtils.toString(id3); + String cid3 = id3.toString(); assertEquals( 
"container_e767_" + ts + "_36473_4365472_1099511627773", cid3); - ContainerId gen3 = ConverterUtils.toContainerId(cid3); + ContainerId gen3 = ContainerId.fromString(cid3); assertEquals(gen3.toString(), id3.toString()); } @Test + @SuppressWarnings("deprecation") public void testContainerIdNull() throws URISyntaxException { assertNull(ConverterUtils.toString((ContainerId)null)); } @@ -101,16 +102,19 @@ public class TestConverterUtils { } @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("deprecation") public void testInvalidContainerId() { - ConverterUtils.toContainerId("container_e20_1423221031460_0003_01"); + ContainerId.fromString("container_e20_1423221031460_0003_01"); } @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("deprecation") public void testInvalidAppattemptId() { ConverterUtils.toApplicationAttemptId("appattempt_1423221031460"); } @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("deprecation") public void testApplicationId() { ConverterUtils.toApplicationId("application_1423221031460"); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java index 1125b1ed0c3..58b60231eb9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java @@ -18,12 +18,32 @@ package org.apache.hadoop.yarn.util; -import static org.apache.hadoop.fs.CreateFlag.CREATE; -import static org.apache.hadoop.fs.CreateFlag.OVERWRITE; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import org.apache.commons.compress.archivers.tar.TarArchiveEntry; +import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.LocalDirAllocator; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.yarn.api.records.LocalResource; +import org.apache.hadoop.yarn.api.records.LocalResourceType; +import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; +import org.apache.hadoop.yarn.factories.RecordFactory; +import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Test; import java.io.File; import java.io.FileOutputStream; @@ -53,33 +73,12 @@ import java.util.zip.GZIPOutputStream; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; -import org.junit.Assert; - -import org.apache.commons.compress.archivers.tar.TarArchiveEntry; -import 
org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileContext; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.LocalDirAllocator; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.api.records.LocalResource; -import org.apache.hadoop.yarn.api.records.LocalResourceType; -import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; -import org.junit.AfterClass; -import org.junit.Test; - -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; +import static org.apache.hadoop.fs.CreateFlag.CREATE; +import static org.apache.hadoop.fs.CreateFlag.OVERWRITE; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeTrue; public class TestFSDownload { @@ -103,7 +102,7 @@ public class TestFSDownload { Random r, LocalResourceVisibility vis) throws IOException { createFile(files, p, len, r); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(p)); + ret.setResource(URL.fromPath(p)); ret.setSize(len); ret.setType(LocalResourceType.FILE); ret.setVisibility(vis); @@ -134,7 +133,7 @@ public class TestFSDownload { LOG.info("Done writing jar stream "); out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(p)); + ret.setResource(URL.fromPath(p)); FileStatus status = files.getFileStatus(p); ret.setSize(status.getLen()); ret.setTimestamp(status.getModificationTime()); @@ -162,7 +161,7 @@ public class TestFSDownload { out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".tar"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); @@ -190,7 +189,7 @@ public class TestFSDownload { out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".tar.gz"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); @@ -216,7 +215,7 @@ public class TestFSDownload { out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".jar"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); @@ -242,7 +241,7 @@ public class TestFSDownload { out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".ZIP"))); ret.setSize(len); 
ret.setType(LocalResourceType.ARCHIVE); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java index aedf6f656b6..84d45439445 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java @@ -278,7 +278,7 @@ public class ApplicationHistoryManagerOnTimelineStore extends AbstractService } if (field == ApplicationReportField.USER_AND_ACLS) { return new ApplicationReportExt(ApplicationReport.newInstance( - ConverterUtils.toApplicationId(entity.getEntityId()), + ApplicationId.fromString(entity.getEntityId()), latestApplicationAttemptId, user, queue, name, null, -1, null, state, diagnosticsInfo, null, createdTime, finishedTime, finalStatus, null, null, progress, type, null, appTags, @@ -394,13 +394,10 @@ public class ApplicationHistoryManagerOnTimelineStore extends AbstractService } if (eventInfo .containsKey(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO)) { - latestApplicationAttemptId = - ConverterUtils - .toApplicationAttemptId( - eventInfo - .get( - ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO) - .toString()); + latestApplicationAttemptId = ApplicationAttemptId.fromString( + eventInfo.get( + ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO) + .toString()); } if (eventInfo .containsKey(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) { @@ -426,7 +423,7 @@ public class ApplicationHistoryManagerOnTimelineStore extends AbstractService } } return new ApplicationReportExt(ApplicationReport.newInstance( - ConverterUtils.toApplicationId(entity.getEntityId()), + ApplicationId.fromString(entity.getEntityId()), latestApplicationAttemptId, user, queue, name, null, -1, null, state, diagnosticsInfo, null, createdTime, finishedTime, finalStatus, appResources, null, progress, type, null, appTags, unmanagedApplication, @@ -471,7 +468,7 @@ public class ApplicationHistoryManagerOnTimelineStore extends AbstractService if (eventInfo .containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO)) { amContainerId = - ConverterUtils.toContainerId(eventInfo.get( + ContainerId.fromString(eventInfo.get( AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO) .toString()); } @@ -513,7 +510,7 @@ public class ApplicationHistoryManagerOnTimelineStore extends AbstractService if (eventInfo .containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO)) { amContainerId = - ConverterUtils.toContainerId(eventInfo.get( + ContainerId.fromString(eventInfo.get( AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO) .toString()); } @@ -521,7 +518,7 @@ public class ApplicationHistoryManagerOnTimelineStore extends AbstractService } } return ApplicationAttemptReport.newInstance( - ConverterUtils.toApplicationAttemptId(entity.getEntityId()), + ApplicationAttemptId.fromString(entity.getEntityId()), host, rpcPort, trackingUrl, 
originalTrackingUrl, diagnosticsInfo, state, amContainerId); } @@ -610,7 +607,7 @@ public class ApplicationHistoryManagerOnTimelineStore extends AbstractService } } ContainerId containerId = - ConverterUtils.toContainerId(entity.getEntityId()); + ContainerId.fromString(entity.getEntityId()); String logUrl = null; NodeId allocatedNode = null; if (allocatedHost != null) { @@ -623,7 +620,7 @@ public class ApplicationHistoryManagerOnTimelineStore extends AbstractService user); } return ContainerReport.newInstance( - ConverterUtils.toContainerId(entity.getEntityId()), + ContainerId.fromString(entity.getEntityId()), Resource.newInstance(allocatedMem, allocatedVcore), allocatedNode, Priority.newInstance(allocatedPriority), createdTime, finishedTime, diagnosticsInfo, logUrl, exitStatus, state, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java index c340b190252..295b8ab6351 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java @@ -204,7 +204,7 @@ public class FileSystemApplicationHistoryStore extends AbstractService FileStatus[] files = fs.listStatus(rootDirPath); for (FileStatus file : files) { ApplicationId appId = - ConverterUtils.toApplicationId(file.getPath().getName()); + ApplicationId.fromString(file.getPath().getName()); try { ApplicationHistoryData historyData = getApplication(appId); if (historyData != null) { @@ -231,8 +231,8 @@ public class FileSystemApplicationHistoryStore extends AbstractService HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.startsWith( ConverterUtils.APPLICATION_ATTEMPT_PREFIX)) { - ApplicationAttemptId appAttemptId = - ConverterUtils.toApplicationAttemptId(entry.key.id); + ApplicationAttemptId appAttemptId = ApplicationAttemptId.fromString( + entry.key.id); if (appAttemptId.getApplicationId().equals(appId)) { ApplicationAttemptHistoryData historyData = historyDataMap.get(appAttemptId); @@ -385,7 +385,7 @@ public class FileSystemApplicationHistoryStore extends AbstractService HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.startsWith(ConverterUtils.CONTAINER_PREFIX)) { ContainerId containerId = - ConverterUtils.toContainerId(entry.key.id); + ContainerId.fromString(entry.key.id); if (containerId.getApplicationAttemptId().equals(appAttemptId)) { ContainerHistoryData historyData = historyDataMap.get(containerId); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java index 1d1b1863698..d3e76f4fb8b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java 
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java @@ -112,7 +112,7 @@ public class BuilderUtils { public static LocalResource newLocalResource(URI uri, LocalResourceType type, LocalResourceVisibility visibility, long size, long timestamp, boolean shouldBeUploadedToSharedCache) { - return newLocalResource(ConverterUtils.getYarnUrlFromURI(uri), type, + return newLocalResource(URL.fromURI(uri), type, visibility, size, timestamp, shouldBeUploadedToSharedCache); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java index 9c2a1ae04de..798c3726739 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java @@ -65,7 +65,7 @@ public class AppAttemptBlock extends HtmlBlock { } try { - appAttemptId = ConverterUtils.toApplicationAttemptId(attemptid); + appAttemptId = ApplicationAttemptId.fromString(attemptid); } catch (IllegalArgumentException e) { puts("Invalid application attempt ID: " + attemptid); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java index cae8d2e6fb5..893e82384f2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java @@ -59,7 +59,7 @@ public class ContainerBlock extends HtmlBlock { ContainerId containerId = null; try { - containerId = ConverterUtils.toContainerId(containerid); + containerId = ContainerId.fromString(containerid); } catch (IllegalArgumentException e) { puts("Invalid container ID: " + containerid); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java index 19ea30136e8..904c5118f44 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java @@ -431,7 +431,7 @@ public class WebServices { } ApplicationId aid = null; try { - aid = ConverterUtils.toApplicationId(appId); + aid = ApplicationId.fromString(appId); } catch (Exception e) { throw new BadRequestException(e); } @@ -449,7 +449,7 @@ public class WebServices { } ApplicationAttemptId aaid = null; try { - aaid = ConverterUtils.toApplicationAttemptId(appAttemptId); + aaid = 
ApplicationAttemptId.fromString(appAttemptId); } catch (Exception e) { throw new BadRequestException(e); } @@ -466,7 +466,7 @@ public class WebServices { } ContainerId cid = null; try { - cid = ConverterUtils.toContainerId(containerId); + cid = ContainerId.fromString(containerId); } catch (Exception e) { throw new BadRequestException(e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java index 0f0f66fa7a1..277b388aee6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java @@ -165,11 +165,10 @@ public class DefaultContainerExecutor extends ContainerExecutor { ContainerId containerId = container.getContainerId(); // create container dirs on all disks - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); String appIdStr = - ConverterUtils.toString( containerId.getApplicationAttemptId(). - getApplicationId()); + getApplicationId().toString(); for (String sLocalDir : localDirs) { Path usersdir = new Path(sLocalDir, ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, user); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java index 72da2365f5a..2b184694364 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java @@ -195,9 +195,9 @@ public class DockerContainerExecutor extends ContainerExecutor { ContainerId containerId = container.getContainerId(); // create container dirs on all disks - String containerIdStr = ConverterUtils.toString(containerId); - String appIdStr = ConverterUtils.toString( - containerId.getApplicationAttemptId().getApplicationId()); + String containerIdStr = containerId.toString(); + String appIdStr = + containerId.getApplicationAttemptId().getApplicationId().toString(); for (String sLocalDir : localDirs) { Path usersdir = new Path(sLocalDir, ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, userName); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java index 8db2ebbe1a9..2dc4ad41a24 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java @@ -297,7 +297,7 @@ public class LinuxContainerExecutor extends ContainerExecutor { String runAsUser = getRunAsUser(user); ContainerId containerId = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); resourcesHandler.preExecute(containerId, container.getResource()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java index a983363cd8b..74f581bcaef 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java @@ -1164,7 +1164,7 @@ public class ContainerImpl implements Container { public String toString() { this.readLock.lock(); try { - return ConverterUtils.toString(this.containerId); + return this.containerId.toString(); } finally { this.readLock.unlock(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java index 5bdfe12388b..b4377f6fee8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java @@ -155,7 +155,7 @@ public class ContainerLaunch implements Callable { final ContainerLaunchContext launchContext = container.getLaunchContext(); Map> localResources = null; ContainerId containerID = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerID); + String containerIdStr = containerID.toString(); final List command = launchContext.getCommands(); int ret = -1; @@ -377,7 +377,7 @@ public class ContainerLaunch implements Callable { @SuppressWarnings("unchecked") // dispatcher not typed public void cleanupContainer() throws IOException { ContainerId containerId = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); LOG.info("Cleaning up container " + containerIdStr); try { @@ -473,7 +473,7 @@ public class ContainerLaunch implements Callable { throws IOException { ContainerId containerId = container.getContainerTokenIdentifier().getContainerID(); - String 
containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); String user = container.getUser(); Signal signal = translateCommandToSignal(command); if (signal.equals(Signal.NULL)) { @@ -565,7 +565,7 @@ public class ContainerLaunch implements Callable { */ private String getContainerPid(Path pidFilePath) throws Exception { String containerIdStr = - ConverterUtils.toString(container.getContainerId()); + container.getContainerId().toString(); String processId = null; LOG.debug("Accessing pid for container " + containerIdStr + " from pid file " + pidFilePath); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java index b9bdcc6c0d7..3cd31b703d5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java @@ -68,9 +68,9 @@ public class RecoveredContainerLaunch extends ContainerLaunch { public Integer call() { int retCode = ExitCode.LOST.getExitCode(); ContainerId containerId = container.getContainerId(); - String appIdStr = ConverterUtils.toString( - containerId.getApplicationAttemptId().getApplicationId()); - String containerIdStr = ConverterUtils.toString(containerId); + String appIdStr = + containerId.getApplicationAttemptId().getApplicationId().toString(); + String containerIdStr = containerId.toString(); dispatcher.getEventHandler().handle(new ContainerEvent(containerId, ContainerEventType.CONTAINER_LAUNCHED)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java index 16bfd9e1d25..e4cd41d8d99 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java @@ -56,6 +56,7 @@ import org.apache.hadoop.util.DiskChecker; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.SerializedException; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; @@ -295,7 +296,7 @@ public class ContainerLocalizer { try { Path localPath = fPath.get(); stat.setLocalPath( - ConverterUtils.getYarnUrlFromPath(localPath)); + URL.fromPath(localPath)); stat.setLocalSize( 
FileUtil.getDU(new File(localPath.getParent().toUri()))); stat.setStatus(ResourceStatusType.FETCH_SUCCESS); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java index 607d0b40866..d2e8e22d459 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java @@ -43,7 +43,7 @@ public class LocalResourceRequest */ public LocalResourceRequest(LocalResource resource) throws URISyntaxException { - this(ConverterUtils.getPathFromYarnURL(resource.getResource()), + this(resource.getResource().toPath(), resource.getTimestamp(), resource.getType(), resource.getVisibility(), @@ -133,7 +133,7 @@ public class LocalResourceRequest @Override public URL getResource() { - return ConverterUtils.getYarnUrlFromPath(loc); + return URL.fromPath(loc); } @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java index 5db51453c29..30d5191072f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java @@ -79,6 +79,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; @@ -301,7 +302,7 @@ public class ResourceLocalizationService extends CompositeService trackerState = appEntry.getValue(); if (!trackerState.isEmpty()) { ApplicationId appId = appEntry.getKey(); - String appIdStr = ConverterUtils.toString(appId); + String appIdStr = appId.toString(); LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user, appId, dispatcher, false, super.getConfig(), stateStore); LocalResourcesTracker oldTracker = appRsrc.putIfAbsent(appIdStr, @@ -442,7 +443,7 @@ public class ResourceLocalizationService extends CompositeService String userName = app.getUser(); privateRsrc.putIfAbsent(userName, new LocalResourcesTrackerImpl(userName, null, dispatcher, true, super.getConfig(), stateStore)); - String appIdStr = ConverterUtils.toString(app.getAppId()); + 
String appIdStr = app.getAppId().toString(); appRsrc.putIfAbsent(appIdStr, new LocalResourcesTrackerImpl(app.getUser(), app.getAppId(), dispatcher, false, super.getConfig(), stateStore)); // 1) Signal container init @@ -491,7 +492,7 @@ public class ResourceLocalizationService extends CompositeService private void handleContainerResourcesLocalized( ContainerLocalizationEvent event) { Container c = event.getContainer(); - String locId = ConverterUtils.toString(c.getContainerId()); + String locId = c.getContainerId().toString(); localizerTracker.endContainerLocalization(locId); } @@ -528,14 +529,15 @@ public class ResourceLocalizationService extends CompositeService c.getContainerId())); } } - String locId = ConverterUtils.toString(c.getContainerId()); + String locId = c.getContainerId().toString(); localizerTracker.cleanupPrivLocalizers(locId); // Delete the container directories String userName = c.getUser(); String containerIDStr = c.toString(); - String appIDStr = ConverterUtils.toString( - c.getContainerId().getApplicationAttemptId().getApplicationId()); + String appIDStr = + c.getContainerId().getApplicationAttemptId().getApplicationId() + .toString(); // Try deleting from good local dirs and full local dirs because a dir might // have gone bad while the app was running(disk full). In addition @@ -583,7 +585,7 @@ public class ResourceLocalizationService extends CompositeService ApplicationId appId = application.getAppId(); String appIDStr = application.toString(); LocalResourcesTracker appLocalRsrcsTracker = - appRsrc.remove(ConverterUtils.toString(appId)); + appRsrc.remove(appId.toString()); if (appLocalRsrcsTracker != null) { for (LocalizedResource rsrc : appLocalRsrcsTracker ) { Path localPath = rsrc.getLocalPath(); @@ -637,7 +639,7 @@ public class ResourceLocalizationService extends CompositeService case PRIVATE: return privateRsrc.get(user); case APPLICATION: - return appRsrc.get(ConverterUtils.toString(appId)); + return appRsrc.get(appId.toString()); } } @@ -977,7 +979,7 @@ public class ResourceLocalizationService extends CompositeService LocalResourceRequest nextRsrc = nRsrc.getRequest(); LocalResource next = recordFactory.newRecordInstance(LocalResource.class); - next.setResource(ConverterUtils.getYarnUrlFromPath(nextRsrc + next.setResource(URL.fromPath(nextRsrc .getPath())); next.setTimestamp(nextRsrc.getTimestamp()); next.setType(nextRsrc.getType()); @@ -1028,8 +1030,8 @@ public class ResourceLocalizationService extends CompositeService try { getLocalResourcesTracker(req.getVisibility(), user, applicationId) .handle( - new ResourceLocalizedEvent(req, ConverterUtils - .getPathFromYarnURL(stat.getLocalPath()), stat.getLocalSize())); + new ResourceLocalizedEvent(req, stat.getLocalPath().toPath(), + stat.getLocalSize())); } catch (URISyntaxException e) { } // unlocking the resource and removing it from scheduled resource @@ -1142,8 +1144,8 @@ public class ResourceLocalizationService extends CompositeService .setNmPrivateContainerTokens(nmPrivateCTokensPath) .setNmAddr(localizationServerAddress) .setUser(context.getUser()) - .setAppId(ConverterUtils.toString(context.getContainerId() - .getApplicationAttemptId().getApplicationId())) + .setAppId(context.getContainerId() + .getApplicationAttemptId().getApplicationId().toString()) .setLocId(localizerId) .setDirsHandler(dirsHandler) .build()); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java index 2e05dd7abdb..0e732a7ce58 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java @@ -37,7 +37,7 @@ public class LocalizerResourceRequestEvent extends LocalizerEvent { public LocalizerResourceRequestEvent(LocalizedResource resource, LocalResourceVisibility vis, LocalizerContext context, String pattern) { super(LocalizerEventType.REQUEST_RESOURCE_LOCALIZATION, - ConverterUtils.toString(context.getContainerId())); + context.getContainerId().toString()); this.vis = vis; this.context = context; this.resource = resource; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java index 682b2726d1e..b034e7a209f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java @@ -211,7 +211,7 @@ class SharedCacheUploader implements Callable { final Path remotePath; try { - remotePath = ConverterUtils.getPathFromYarnURL(resource.getResource()); + remotePath = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException("Invalid resource", e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java index 32b0934807c..a5b1e2c5ec8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java @@ -147,7 +147,7 @@ public class AppLogAggregatorImpl implements AppLogAggregator { this.conf = conf; this.delService = deletionService; this.appId = appId; - this.applicationId = ConverterUtils.toString(appId); + 
this.applicationId = appId.toString(); this.userUgi = userUgi; this.dirsHandler = dirsHandler; this.remoteNodeLogFileForApp = remoteNodeLogFileForApp; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java index 1a8c6ff7068..e8708c604a8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java @@ -181,7 +181,7 @@ public class NMLeveldbStateStoreService extends NMStateStoreService { if (idEndPos < 0) { throw new IOException("Unable to determine container in key: " + key); } - ContainerId containerId = ConverterUtils.toContainerId( + ContainerId containerId = ContainerId.fromString( key.substring(CONTAINERS_KEY_PREFIX.length(), idEndPos)); String keyPrefix = key.substring(0, idEndPos+1); RecoveredContainerState rcs = loadContainerState(containerId, @@ -545,7 +545,7 @@ public class NMLeveldbStateStoreService extends NMStateStoreService { throw new IOException("Unable to determine appID in resource key: " + key); } - ApplicationId appId = ConverterUtils.toApplicationId( + ApplicationId appId = ApplicationId.fromString( key.substring(appIdStartPos, appIdEndPos)); userResources.appTrackerStates.put(appId, loadResourceTrackerState(iter, key.substring(0, appIdEndPos+1))); @@ -713,7 +713,7 @@ public class NMLeveldbStateStoreService extends NMStateStoreService { ApplicationAttemptId.appAttemptIdStrPrefix)) { ApplicationAttemptId attempt; try { - attempt = ConverterUtils.toApplicationAttemptId(key); + attempt = ApplicationAttemptId.fromString(key); } catch (IllegalArgumentException e) { throw new IOException("Bad application master key state for " + fullKey, e); @@ -817,7 +817,7 @@ public class NMLeveldbStateStoreService extends NMStateStoreService { ContainerId containerId; Long expTime; try { - containerId = ConverterUtils.toContainerId(containerIdStr); + containerId = ContainerId.fromString(containerIdStr); expTime = Long.parseLong(asString(value)); } catch (IllegalArgumentException e) { throw new IOException("Bad container token state for " + key, e); @@ -879,7 +879,7 @@ public class NMLeveldbStateStoreService extends NMStateStoreService { String appIdStr = fullKey.substring(logDeleterKeyPrefixLength); ApplicationId appId = null; try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (IllegalArgumentException e) { LOG.warn("Skipping unknown log deleter key " + fullKey); continue; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java index 21cf1f27c24..21c3c064040 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java @@ -28,7 +28,7 @@ public class NodeManagerBuilderUtils { public static ResourceLocalizationSpec newResourceLocalizationSpec( LocalResource rsrc, Path path) { - URL local = ConverterUtils.getYarnUrlFromPath(path); + URL local = URL.fromPath(path); ResourceLocalizationSpec resourceLocalizationSpec = Records.newRecord(ResourceLocalizationSpec.class); resourceLocalizationSpec.setDestinationDirectory(local); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java index 80d4db24988..5a7dba7ebc1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java @@ -27,6 +27,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.Shell; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.util.ConverterUtils; /** @@ -69,7 +70,7 @@ public class ProcessIdFileReader { // On Windows, pid is expected to be a container ID, so find first // line that parses successfully as a container ID. 
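// Illustrative sketch, not part of the patch: ContainerId.fromString (the
// replacement for ConverterUtils.toContainerId used in the hunk below) throws
// IllegalArgumentException on malformed input, so a candidate string can be
// validated with a plain try/catch. The helper name isValidContainerId is
// hypothetical.
import org.apache.hadoop.yarn.api.records.ContainerId;

class ContainerIdCheck {
  static boolean isValidContainerId(String s) {
    try {
      ContainerId.fromString(s); // e.g. "container_1465940331111_0001_01_000001"
      return true;
    } catch (IllegalArgumentException e) {
      return false;
    }
  }
}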
try { - ConverterUtils.toContainerId(temp); + ContainerId.fromString(temp); processId = temp; break; } catch (Exception e) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java index 1a92491bb4b..d5c3b8f0995 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java @@ -75,9 +75,7 @@ public class ApplicationPage extends NMView implements YarnWebParams { @Override protected void render(Block html) { - ApplicationId applicationID = - ConverterUtils.toApplicationId(this.recordFactory, - $(APPLICATION_ID)); + ApplicationId applicationID = ApplicationId.fromString($(APPLICATION_ID)); Application app = this.nmContext.getApplications().get(applicationID); AppInfo info = new AppInfo(app); info("Application's information") diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java index 2fd6b2cdf11..3e5f4d2e49d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java @@ -92,7 +92,7 @@ public class ContainerLogsPage extends NMView { ContainerId containerId; try { - containerId = ConverterUtils.toContainerId($(CONTAINER_ID)); + containerId = ContainerId.fromString($(CONTAINER_ID)); } catch (IllegalArgumentException ex) { html.h1("Invalid container ID: " + $(CONTAINER_ID)); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java index 319f49be5a6..35e75939f30 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java @@ -78,8 +78,8 @@ public class ContainerLogsUtils { List containerLogDirs = new ArrayList(logDirs.size()); for (String logDir : logDirs) { logDir = new File(logDir).toURI().getPath(); - String appIdStr = ConverterUtils.toString(containerId - .getApplicationAttemptId().getApplicationId()); + String appIdStr = containerId + .getApplicationAttemptId().getApplicationId().toString(); File appLogDir = new File(logDir, appIdStr); 
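// Illustrative sketch, not part of the patch: the surrounding ContainerLogsUtils
// hunk derives the per-application and per-container log directories purely from
// the records' own toString() forms instead of ConverterUtils.toString(). The
// method name containerLogDir is hypothetical.
import java.io.File;
import org.apache.hadoop.yarn.api.records.ContainerId;

class LogDirSketch {
  static File containerLogDir(File logRoot, ContainerId containerId) {
    // appIdStr and the container directory name match the layout used above.
    String appIdStr =
        containerId.getApplicationAttemptId().getApplicationId().toString();
    return new File(new File(logRoot, appIdStr), containerId.toString());
  }
}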
containerLogDirs.add(new File(appLogDir, containerId.toString())); } @@ -160,7 +160,7 @@ public class ContainerLogsUtils { public static FileInputStream openLogFileForRead(String containerIdStr, File logFile, Context context) throws IOException { - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); ApplicationId applicationId = containerId.getApplicationAttemptId() .getApplicationId(); String user = context.getApplications().get( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java index f4367bcb892..a1e0bc77108 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java @@ -63,7 +63,7 @@ public class ContainerPage extends NMView implements YarnWebParams { protected void render(Block html) { ContainerId containerID; try { - containerID = ConverterUtils.toContainerId($(CONTAINER_ID)); + containerID = ContainerId.fromString($(CONTAINER_ID)); } catch (IllegalArgumentException e) { html.p()._("Invalid containerId " + $(CONTAINER_ID))._(); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java index 06e9abc7c1e..02b2ab06076 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java @@ -181,7 +181,7 @@ public class NMWebServices { ContainerId containerId = null; init(); try { - containerId = ConverterUtils.toContainerId(id); + containerId = ContainerId.fromString(id); } catch (Exception e) { throw new BadRequestException("invalid container id, " + id); } @@ -218,7 +218,7 @@ public class NMWebServices { @PathParam("filename") String filename) { ContainerId containerId; try { - containerId = ConverterUtils.toContainerId(containerIdStr); + containerId = ContainerId.fromString(containerIdStr); } catch (IllegalArgumentException ex) { return Response.status(Status.BAD_REQUEST).build(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java index 95e2a6537bb..f55ca810d8d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java @@ -42,14 +42,14 @@ public class AppInfo { } // JAXB needs this public AppInfo(final Application app) { - this.id = ConverterUtils.toString(app.getAppId()); + this.id = app.getAppId().toString(); this.state = app.getApplicationState().toString(); this.user = app.getUser(); this.containerids = new ArrayList(); Map appContainers = app.getContainers(); for (ContainerId containerId : appContainers.keySet()) { - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); containerids.add(containerIdStr); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java index 9fb8ebf43e5..5f9b8830a14 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java @@ -118,7 +118,7 @@ public class TestNodeManagerReboot { ContainerId cId = createContainerId(); URL localResourceUri = - ConverterUtils.getYarnUrlFromPath(localFS.makeQualified(new Path( + URL.fromPath(localFS.makeQualified(new Path( localResourceDir.getAbsolutePath()))); LocalResource localResource = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java index b3d44f526ef..ee2677ce5d2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java @@ -741,7 +741,7 @@ public class TestNodeManagerResync { ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java index 980c76440a9..b3ad31821d4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java @@ -200,7 +200,7 @@ public class TestNodeManagerShutdown { .getCanonicalHostName(), port); URL localResourceUri = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource localResource = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java index ce7e388590a..e3d3fa6ebe9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; +import org.apache.hadoop.yarn.api.records.URL; import org.junit.Assert; import org.apache.hadoop.conf.Configuration; @@ -62,8 +63,7 @@ public class TestPBRecordImpl { static LocalResource createResource() { LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); assertTrue(ret instanceof LocalResourcePBImpl); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path( - "hdfs://y.ak:8020/foo/bar"))); + ret.setResource(URL.fromPath(new Path("hdfs://y.ak:8020/foo/bar"))); ret.setSize(4344L); ret.setTimestamp(3141592653589793L); ret.setVisibility(LocalResourceVisibility.PUBLIC); @@ -76,7 +76,7 @@ public class TestPBRecordImpl { assertTrue(ret instanceof LocalResourceStatusPBImpl); ret.setResource(createResource()); ret.setLocalPath( - ConverterUtils.getYarnUrlFromPath( + URL.fromPath( new Path("file:///local/foo/bar"))); ret.setStatus(ResourceStatusType.FETCH_SUCCESS); ret.setLocalSize(4443L); @@ -109,8 +109,8 @@ public class TestPBRecordImpl { ResourceLocalizationSpec resource = recordFactory.newRecordInstance(ResourceLocalizationSpec.class); resource.setResource(rsrc); - resource.setDestinationDirectory(ConverterUtils - .getYarnUrlFromPath(new Path("/tmp" + System.currentTimeMillis()))); + resource.setDestinationDirectory( + URL.fromPath((new Path("/tmp" + System.currentTimeMillis())))); rsrcs.add(resource); ret.setResourceSpecs(rsrcs); System.out.println(resource); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java index 3f5fc825c59..72a0221adbc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java @@ -212,7 +212,7 @@ public class TestContainerManager extends BaseContainerManagerTest { ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(file.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); rsrc_alpha.setResource(resource_alpha); @@ -242,8 +242,8 @@ public class TestContainerManager extends BaseContainerManagerTest { // Now ascertain that the resources are localised correctly. ApplicationId appId = cId.getApplicationAttemptId().getApplicationId(); - String appIDStr = ConverterUtils.toString(appId); - String containerIDStr = ConverterUtils.toString(cId); + String appIDStr = appId.toString(); + String containerIDStr = cId.toString(); File userCacheDir = new File(localDir, ContainerLocalizer.USERCACHE); File userDir = new File(userCacheDir, user); File appCache = new File(userDir, ContainerLocalizer.APPCACHE); @@ -301,7 +301,7 @@ public class TestContainerManager extends BaseContainerManagerTest { recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -408,7 +408,7 @@ public class TestContainerManager extends BaseContainerManagerTest { recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -501,7 +501,7 @@ public class TestContainerManager extends BaseContainerManagerTest { // containerLaunchContext.resources = // new HashMap(); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(FileContext.getLocalFSFileContext() + URL.fromPath(FileContext.getLocalFSFileContext() .makeQualified(new Path(file.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); rsrc_alpha.setResource(resource_alpha); @@ -534,8 +534,8 @@ public class TestContainerManager extends BaseContainerManagerTest { ApplicationState.RUNNING); // Now ascertain that the resources are localised correctly. 
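// Illustrative sketch, not part of the patch: the test changes around this point
// all follow the same two substitutions -- URL.fromPath(path) replaces
// ConverterUtils.getYarnUrlFromPath(path), and the id records' own toString()
// replaces ConverterUtils.toString(). A minimal LocalResource set-up using those
// calls (variable and method names are hypothetical) could look like:
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.URL;

class LocalResourceSketch {
  static LocalResource scriptResource(Path qualifiedScriptPath,
      long size, long timestamp) {
    // URL.fromPath converts a Hadoop Path into a YARN URL record directly.
    return LocalResource.newInstance(URL.fromPath(qualifiedScriptPath),
        LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
        size, timestamp);
  }
}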
- String appIDStr = ConverterUtils.toString(appId); - String containerIDStr = ConverterUtils.toString(cId); + String appIDStr = appId.toString(); + String containerIDStr = cId.toString(); File userCacheDir = new File(localDir, ContainerLocalizer.USERCACHE); File userDir = new File(userCacheDir, user); File appCache = new File(userDir, ContainerLocalizer.APPCACHE); @@ -989,7 +989,7 @@ public class TestContainerManager extends BaseContainerManagerTest { ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1073,7 +1073,7 @@ public class TestContainerManager extends BaseContainerManagerTest { ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1227,7 +1227,7 @@ public class TestContainerManager extends BaseContainerManagerTest { ContainerId cId = createContainerId(0); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java index 762a99a9289..6cfa9141ac6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java @@ -415,7 +415,7 @@ public class TestContainerManagerRecovery extends BaseContainerManagerTest { fileWriter.close(); FileContext localFS = FileContext.getLocalFSFileContext(); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = RecordFactoryProvider .getRecordFactory(null).newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java index 502ecdec666..663f05960d6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java @@ -600,7 +600,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest { // upload the script file so that the container can run it URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -801,7 +801,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest { // upload the script file so that the container can run it URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1140,7 +1140,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest { // upload the script file so that the container can run it URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java index 611fc05137c..fac708655f5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java @@ -404,7 +404,7 @@ public class TestContainerLocalizer { when(resourceLocalizationSpec.getResource()).thenReturn(rsrc); when(resourceLocalizationSpec.getDestinationDirectory()). 
- thenReturn(ConverterUtils.getYarnUrlFromPath(p)); + thenReturn(URL.fromPath(p)); return resourceLocalizationSpec; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java index 81446f5a875..13310ad5100 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java @@ -24,6 +24,7 @@ import java.util.Random; import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalResourceRequest; import org.apache.hadoop.yarn.util.ConverterUtils; @@ -39,8 +40,10 @@ public class TestLocalResource { static org.apache.hadoop.yarn.api.records.LocalResource getYarnResource(Path p, long size, long timestamp, LocalResourceType type, LocalResourceVisibility state, String pattern) throws URISyntaxException { - org.apache.hadoop.yarn.api.records.LocalResource ret = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(org.apache.hadoop.yarn.api.records.LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromURI(p.toUri())); + org.apache.hadoop.yarn.api.records.LocalResource ret = + RecordFactoryProvider.getRecordFactory(null).newRecordInstance( + org.apache.hadoop.yarn.api.records.LocalResource.class); + ret.setResource(URL.fromURI(p.toUri())); ret.setSize(size); ret.setTimestamp(timestamp); ret.setType(type); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java index 64d3d68ce25..cf9f7b92953 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java @@ -945,7 +945,7 @@ public class TestResourceLocalizationService { // Sigh. 
Thread init of private localizer not accessible Thread.sleep(1000); dispatcher.await(); - String appStr = ConverterUtils.toString(appId); + String appStr = appId.toString(); String ctnrStr = c.getContainerId().toString(); ArgumentCaptor contextCaptor = ArgumentCaptor .forClass(LocalizerStartContext.class); @@ -2144,12 +2144,16 @@ public class TestResourceLocalizationService { // removing pending download request. spyService.getPublicLocalizer().pending.clear(); + LocalizerContext lc = mock(LocalizerContext.class); + when(lc.getContainerId()).thenReturn(ContainerId.newContainerId( + ApplicationAttemptId.newInstance(ApplicationId.newInstance(1L, 1), 1), + 1L)); + // Now I need to simulate a race condition wherein Event is added to // dispatcher before resource state changes to either FAILED or LOCALIZED // Hence sending event directly to dispatcher. LocalizerResourceRequestEvent localizerEvent = - new LocalizerResourceRequestEvent(lr, null, - mock(LocalizerContext.class), null); + new LocalizerResourceRequestEvent(lr, null, lc, null); dispatcher1.getEventHandler().handle(localizerEvent); // Waiting for download to start. This should return false as new download @@ -2457,7 +2461,7 @@ public class TestResourceLocalizationService { BuilderUtils.newApplicationId(314159265358979L, 3); when(app.getUser()).thenReturn(user); when(app.getAppId()).thenReturn(appId); - when(app.toString()).thenReturn(ConverterUtils.toString(appId)); + when(app.toString()).thenReturn(appId.toString()); // init container. final Container c = getMockContainer(appId, 42, user); @@ -2468,17 +2472,16 @@ public class TestResourceLocalizationService { Path usersdir = new Path(tmpDirs.get(i), ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, user); Path allAppsdir = new Path(userdir, ContainerLocalizer.APPCACHE); - Path appDir = new Path(allAppsdir, ConverterUtils.toString(appId)); + Path appDir = new Path(allAppsdir, appId.toString()); Path containerDir = - new Path(appDir, ConverterUtils.toString(c.getContainerId())); + new Path(appDir, c.getContainerId().toString()); containerLocalDirs.add(containerDir); appLocalDirs.add(appDir); Path sysDir = new Path(tmpDirs.get(i), ResourceLocalizationService.NM_PRIVATE_DIR); - Path appSysDir = new Path(sysDir, ConverterUtils.toString(appId)); - Path containerSysDir = - new Path(appSysDir, ConverterUtils.toString(c.getContainerId())); + Path appSysDir = new Path(sysDir, appId.toString()); + Path containerSysDir = new Path(appSysDir, c.getContainerId().toString()); nmLocalContainerDirs.add(containerSysDir); nmLocalAppDirs.add(appSysDir); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java index 90b976ba201..d56d030fd36 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java @@ -191,7 +191,7 @@ public class TestLogAggregationService extends 
BaseContainerManagerTest { // AppLogDir should be created File app1LogDir = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); app1LogDir.mkdir(); logAggregationService .handle(new LogHandlerAppStartedEvent( @@ -218,7 +218,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { verify(delSrvc).delete(eq(user), eq((Path) null), eq(new Path(app1LogDir.getAbsolutePath()))); - String containerIdStr = ConverterUtils.toString(container11); + String containerIdStr = container11.toString(); File containerLogDir = new File(app1LogDir, containerIdStr); int count = 0; int maxAttempts = 50; @@ -312,7 +312,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { logAggregationService.start(); ApplicationId app = BuilderUtils.newApplicationId(1234, 1); - File appLogDir = new File(localLogDir, ConverterUtils.toString(app)); + File appLogDir = new File(localLogDir, app.toString()); appLogDir.mkdir(); LogAggregationContext context = LogAggregationContext.newInstance("HOST*", "sys*"); @@ -349,7 +349,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { // AppLogDir should be created File app1LogDir = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); app1LogDir.mkdir(); logAggregationService .handle(new LogHandlerAppStartedEvent( @@ -399,7 +399,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { // AppLogDir should be created File app1LogDir = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); app1LogDir.mkdir(); logAggregationService .handle(new LogHandlerAppStartedEvent( @@ -420,7 +420,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { BuilderUtils.newApplicationAttemptId(application2, 1); File app2LogDir = - new File(localLogDir, ConverterUtils.toString(application2)); + new File(localLogDir, application2.toString()); app2LogDir.mkdir(); LogAggregationContext contextWithAMOnly = Records.newRecord(LogAggregationContext.class); @@ -449,7 +449,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { BuilderUtils.newApplicationAttemptId(application3, 1); File app3LogDir = - new File(localLogDir, ConverterUtils.toString(application3)); + new File(localLogDir, application3.toString()); app3LogDir.mkdir(); LogAggregationContext contextWithAMAndFailed = Records.newRecord(LogAggregationContext.class); @@ -580,7 +580,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { BuilderUtils.newApplicationId(System.currentTimeMillis(), (int) (Math.random() * 1000)); File appLogDir = - new File(localLogDir, ConverterUtils.toString(appId2)); + new File(localLogDir, appId2.toString()); appLogDir.mkdir(); logAggregationService.handle(new LogHandlerAppStartedEvent(appId2, this.user, null, this.acls, contextWithAMAndFailed)); @@ -755,7 +755,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { (int) (Math.random() * 1000)); File appLogDir = - new File(localLogDir, ConverterUtils.toString(appId)); + new File(localLogDir, appId.toString()); appLogDir.mkdir(); Exception e = new RuntimeException("KABOOM!"); @@ -802,7 +802,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { private void writeContainerLogs(File appLogDir, ContainerId containerId, String[] fileName) throws IOException { // ContainerLogDir should be created - 
String containerStr = ConverterUtils.toString(containerId); + String containerStr = containerId.toString(); File containerLogDir = new File(appLogDir, containerStr); boolean created = containerLogDir.mkdirs(); LOG.info("Created Dir:" + containerLogDir.getAbsolutePath() + " status :" @@ -940,7 +940,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { Assert.assertTrue("number of containers with logs should be at most " + minNumOfContainers,logMap.size() <= maxNumOfContainers); for (ContainerId cId : expectedContainerIds) { - String containerStr = ConverterUtils.toString(cId); + String containerStr = cId.toString(); Map thisContainerMap = logMap.remove(containerStr); Assert.assertEquals(numOfLogsPerContainer, thisContainerMap.size()); for (String fileType : logFiles) { @@ -995,7 +995,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1295,7 +1295,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { // has only logs from stdout and syslog // AppLogDir should be created File appLogDir1 = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); appLogDir1.mkdir(); logAggregationService.handle(new LogHandlerAppStartedEvent(application1, this.user, null, this.acls, @@ -1320,7 +1320,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { BuilderUtils.newApplicationAttemptId(application2, 1); File app2LogDir = - new File(localLogDir, ConverterUtils.toString(application2)); + new File(localLogDir, application2.toString()); app2LogDir.mkdir(); LogAggregationContextWithExcludePatterns.setLogAggregationPolicyClassName( AMOnlyLogAggregationPolicy.class.getName()); @@ -1345,7 +1345,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { ApplicationAttemptId appAttemptId3 = BuilderUtils.newApplicationAttemptId(application3, 1); File app3LogDir = - new File(localLogDir, ConverterUtils.toString(application3)); + new File(localLogDir, application3.toString()); app3LogDir.mkdir(); context1.setLogAggregationPolicyClassName( AMOnlyLogAggregationPolicy.class.getName()); @@ -1370,7 +1370,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { ApplicationAttemptId appAttemptId4 = BuilderUtils.newApplicationAttemptId(application4, 1); File app4LogDir = - new File(localLogDir, ConverterUtils.toString(application4)); + new File(localLogDir, application4.toString()); app4LogDir.mkdir(); context2.setLogAggregationPolicyClassName( AMOnlyLogAggregationPolicy.class.getName()); @@ -1872,7 +1872,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { containerType); // Simulate log-file creation File appLogDir1 = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); appLogDir1.mkdir(); writeContainerLogs(appLogDir1, containerId, logFiles); @@ -1983,7 +1983,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { // AppLogDir should be created File appLogDir = - new File(localLogDir, ConverterUtils.toString(application)); + new File(localLogDir, application.toString()); appLogDir.mkdir(); logAggregationService.handle(new 
LogHandlerAppStartedEvent(application, this.user, null, this.acls, logAggregationContextWithInterval)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java index 94145e40f61..1b4e3b7d77c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java @@ -210,7 +210,7 @@ public class TestContainersMonitor extends BaseContainerManagerTest { ContainerId cId = ContainerId.newContainerId(appAttemptId, 0); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java index e44e5e51e64..5ac237c78b9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java @@ -350,7 +350,7 @@ public class TestNMLeveldbStateStoreService { Path appRsrcPath = new Path("hdfs://some/app/resource"); LocalResourcePBImpl rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(appRsrcPath), + URL.fromPath(appRsrcPath), LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION, 123L, 456L); LocalResourceProto appRsrcProto = rsrcPb.getProto(); @@ -383,7 +383,7 @@ public class TestNMLeveldbStateStoreService { // start some public and private resources Path pubRsrcPath1 = new Path("hdfs://some/public/resource1"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath1), + URL.fromPath(pubRsrcPath1), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto1 = rsrcPb.getProto(); @@ -392,7 +392,7 @@ public class TestNMLeveldbStateStoreService { pubRsrcLocalPath1); Path pubRsrcPath2 = new Path("hdfs://some/public/resource2"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath2), + URL.fromPath(pubRsrcPath2), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto2 = rsrcPb.getProto(); @@ -401,7 +401,7 @@ public class TestNMLeveldbStateStoreService { pubRsrcLocalPath2); Path privRsrcPath = new Path("hdfs://some/private/resource"); rsrcPb = (LocalResourcePBImpl) 
LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(privRsrcPath), + URL.fromPath(privRsrcPath), LocalResourceType.PATTERN, LocalResourceVisibility.PRIVATE, 789L, 680L, "*pattern*"); LocalResourceProto privRsrcProto = rsrcPb.getProto(); @@ -446,7 +446,7 @@ public class TestNMLeveldbStateStoreService { Path appRsrcPath = new Path("hdfs://some/app/resource"); LocalResourcePBImpl rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(appRsrcPath), + URL.fromPath(appRsrcPath), LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION, 123L, 456L); LocalResourceProto appRsrcProto = rsrcPb.getProto(); @@ -486,7 +486,7 @@ public class TestNMLeveldbStateStoreService { // start some public and private resources Path pubRsrcPath1 = new Path("hdfs://some/public/resource1"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath1), + URL.fromPath(pubRsrcPath1), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto1 = rsrcPb.getProto(); @@ -495,7 +495,7 @@ public class TestNMLeveldbStateStoreService { pubRsrcLocalPath1); Path pubRsrcPath2 = new Path("hdfs://some/public/resource2"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath2), + URL.fromPath(pubRsrcPath2), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto2 = rsrcPb.getProto(); @@ -504,7 +504,7 @@ public class TestNMLeveldbStateStoreService { pubRsrcLocalPath2); Path privRsrcPath = new Path("hdfs://some/private/resource"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(privRsrcPath), + URL.fromPath(privRsrcPath), LocalResourceType.PATTERN, LocalResourceVisibility.PRIVATE, 789L, 680L, "*pattern*"); LocalResourceProto privRsrcProto = rsrcPb.getProto(); @@ -565,7 +565,7 @@ public class TestNMLeveldbStateStoreService { Path appRsrcPath = new Path("hdfs://some/app/resource"); LocalResourcePBImpl rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(appRsrcPath), + URL.fromPath(appRsrcPath), LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION, 123L, 456L); LocalResourceProto appRsrcProto = rsrcPb.getProto(); @@ -595,7 +595,7 @@ public class TestNMLeveldbStateStoreService { // add public and private resources and remove some Path pubRsrcPath1 = new Path("hdfs://some/public/resource1"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath1), + URL.fromPath(pubRsrcPath1), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto1 = rsrcPb.getProto(); @@ -611,7 +611,7 @@ public class TestNMLeveldbStateStoreService { stateStore.finishResourceLocalization(null, null, pubLocalizedProto1); Path pubRsrcPath2 = new Path("hdfs://some/public/resource2"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath2), + URL.fromPath(pubRsrcPath2), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto2 = rsrcPb.getProto(); @@ -628,7 +628,7 @@ public class TestNMLeveldbStateStoreService { stateStore.removeLocalizedResource(null, null, pubRsrcLocalPath2); Path privRsrcPath = new Path("hdfs://some/private/resource"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(privRsrcPath), + 
URL.fromPath(privRsrcPath), LocalResourceType.PATTERN, LocalResourceVisibility.PRIVATE, 789L, 680L, "*pattern*"); LocalResourceProto privRsrcProto = rsrcPb.getProto(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java index f0c7cbcde13..40e984f873d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java @@ -249,7 +249,7 @@ public class TestNMWebServer { containerLogDir.mkdirs(); for (String fileType : new String[] { "stdout", "stderr", "syslog" }) { Writer writer = new FileWriter(new File(containerLogDir, fileType)); - writer.write(ConverterUtils.toString(containerId) + "\n Hello " + writer.write(containerId.toString() + "\n Hello " + fileType + "!"); writer.close(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java index 3c4a660c88a..529e6a5c58c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java @@ -40,6 +40,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.util.NodeHealthScriptRunner; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; @@ -280,7 +281,7 @@ public class TestNMWebServicesContainers extends JerseyTestBase { verifyNodeContainerInfo( conInfo.getJSONObject(i), nmContext.getContainers().get( - ConverterUtils.toContainerId(conInfo.getJSONObject(i).getString( + ContainerId.fromString(conInfo.getJSONObject(i).getString( "id")))); } } @@ -316,7 +317,7 @@ public class TestNMWebServicesContainers extends JerseyTestBase { assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); JSONObject json = response.getEntity(JSONObject.class); verifyNodeContainerInfo(json.getJSONObject("container"), nmContext - .getContainers().get(ConverterUtils.toContainerId(id))); + .getContainers().get(ContainerId.fromString(id))); } } @@ -449,7 +450,7 @@ public class TestNMWebServicesContainers extends JerseyTestBase { NodeList nodes = dom.getElementsByTagName("container"); assertEquals("incorrect number of elements", 1, nodes.getLength()); verifyContainersInfoXML(nodes, - nmContext.getContainers().get(ConverterUtils.toContainerId(id))); + nmContext.getContainers().get(ContainerId.fromString(id))); } 
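// Illustrative sketch, not part of the patch: the resourcemanager changes that
// follow rely on the id records round-tripping through their own
// toString()/fromString() pairs, which replace the ConverterUtils helpers.
// The values below are arbitrary.
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.NodeId;

class IdRoundTripSketch {
  static void roundTrip() {
    ApplicationId appId = ApplicationId.newInstance(1465940331111L, 1);
    // "application_1465940331111_0001" parses back into an equal record.
    ApplicationId parsed = ApplicationId.fromString(appId.toString());

    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    ApplicationAttemptId parsedAttempt =
        ApplicationAttemptId.fromString(attemptId.toString());

    // NodeId.fromString replaces ConverterUtils.toNodeId for "host:port" strings.
    NodeId nodeId = NodeId.fromString("localhost:4344");
  }
}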
} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java index ee4419d6e37..7e4eb915938 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java @@ -1310,7 +1310,7 @@ public class ResourceManager extends CompositeService implements Recoverable { rmStore.init(conf); rmStore.start(); try { - ApplicationId removeAppId = ConverterUtils.toApplicationId(applicationId); + ApplicationId removeAppId = ApplicationId.fromString(applicationId); LOG.info("Deleting application " + removeAppId + " from state store"); rmStore.removeApplication(removeAppId); LOG.info("Application is deleted from state store"); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java index 913412a7ae5..2a985092df2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java @@ -499,7 +499,7 @@ public class LeveldbRMStateStore extends RMStateStore { private ApplicationStateData createApplicationState(String appIdStr, byte[] data) throws IOException { - ApplicationId appId = ConverterUtils.toApplicationId(appIdStr); + ApplicationId appId = ApplicationId.fromString(appIdStr); ApplicationStateDataPBImpl appState = new ApplicationStateDataPBImpl( ApplicationStateDataProto.parseFrom(data)); @@ -529,8 +529,7 @@ public class LeveldbRMStateStore extends RMStateStore { private ApplicationAttemptStateData createAttemptState(String itemName, byte[] data) throws IOException { - ApplicationAttemptId attemptId = - ConverterUtils.toApplicationAttemptId(itemName); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString(itemName); ApplicationAttemptStateDataPBImpl attemptState = new ApplicationAttemptStateDataPBImpl( ApplicationAttemptStateDataProto.parseFrom(data)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java index e6d6ba30281..0f3ebe6ba0c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java @@ -543,7 +543,7 @@ public class ZKRMStateStore extends RMStateStore { if (LOG.isDebugEnabled()) { LOG.debug("Loading application from znode: " + childNodeName); } - ApplicationId appId = ConverterUtils.toApplicationId(childNodeName); + ApplicationId appId = ApplicationId.fromString(childNodeName); ApplicationStateDataPBImpl appState = new ApplicationStateDataPBImpl( ApplicationStateDataProto.parseFrom(childData)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java index 045c7bdc957..65491026a94 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java @@ -133,7 +133,7 @@ public class DynamicResourceConfiguration extends Configuration { = new HashMap (); for (String node : nodes) { - NodeId nid = ConverterUtils.toNodeId(node); + NodeId nid = NodeId.fromString(node); int vcores = getVcoresPerNode(node); int memory = getMemoryPerNode(node); int overCommitTimeout = getOverCommitTimeoutPerNode(node); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java index e5b1dbcffb6..6d468a48b43 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java @@ -345,7 +345,7 @@ public class RMContainerImpl implements RMContainer, Comparable { logURL.append(WebAppUtils.getHttpSchemePrefix(rmContext .getYarnConfiguration())); logURL.append(WebAppUtils.getRunningLogURL( - container.getNodeHttpAddress(), ConverterUtils.toString(containerId), + container.getNodeHttpAddress(), containerId.toString(), user)); return logURL.toString(); } finally { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java index 9f2f30853ee..c88d866c26a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java @@ -82,8 +82,8 @@ public class RMAppsBlock extends AppsBlock { } AppInfo app = new AppInfo(appReport); - ApplicationAttemptId appAttemptId = - ConverterUtils.toApplicationAttemptId(app.getCurrentAppAttemptId()); + ApplicationAttemptId appAttemptId = ApplicationAttemptId.fromString( + app.getCurrentAppAttemptId()); String queuePercent = "N/A"; String clusterPercent = "N/A"; if(appReport.getApplicationResourceUsageReport() != null) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java index de2a23f786f..0f1a590dfbe 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java @@ -220,7 +220,7 @@ public class RMWebAppFilter extends GuiceContainer { break; case "appattempt": try{ - appAttemptId = ConverterUtils.toApplicationAttemptId(parts[3]); + appAttemptId = ApplicationAttemptId.fromString(parts[3]); } catch (IllegalArgumentException e) { LOG.debug("Error parsing {} as an ApplicationAttemptId", parts[3], e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java index a427c336312..66c63ba819d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java @@ -109,6 +109,7 @@ import org.apache.hadoop.yarn.api.records.ReservationRequest; import org.apache.hadoop.yarn.api.records.ReservationRequestInterpreter; import org.apache.hadoop.yarn.api.records.ReservationRequests; import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; @@ -368,7 +369,7 @@ public class RMWebServices { if (sched == null) { throw new NotFoundException("Null ResourceScheduler instance"); } - NodeId nid = ConverterUtils.toNodeId(nodeId); + NodeId nid = NodeId.fromString(nodeId); RMNode ni = this.rm.getRMContext().getRMNodes().get(nid); boolean isInactive = false; if (ni == null) { @@ -1461,9 +1462,7 @@ public class RMWebServices { String error = "Could not parse application id " + newApp.getApplicationId(); try { - appid = - ConverterUtils.toApplicationId(recordFactory, - newApp.getApplicationId()); + appid = 
ApplicationId.fromString(newApp.getApplicationId()); } catch (Exception e) { throw new BadRequestException(error); } @@ -1536,7 +1535,7 @@ public class RMWebServices { LocalResourceInfo l = entry.getValue(); LocalResource lr = LocalResource.newInstance( - ConverterUtils.getYarnUrlFromURI(l.getUrl()), l.getType(), + URL.fromURI(l.getUrl()), l.getType(), l.getVisibility(), l.getSize(), l.getTimestamp()); hlr.put(entry.getKey(), lr); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java index b6e95a6bb35..5322f4394d0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java @@ -63,7 +63,7 @@ public class AppAttemptInfo { this.nodeId = masterContainer.getNodeId().toString(); this.logsLink = WebAppUtils.getRunningLogURL(schemePrefix + masterContainer.getNodeHttpAddress(), - ConverterUtils.toString(masterContainer.getId()), user); + masterContainer.getId().toString(), user); if (rm.getResourceScheduler() instanceof AbstractYarnScheduler) { AbstractYarnScheduler ayScheduler = (AbstractYarnScheduler) rm.getResourceScheduler(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java index 63b601deab0..c5c02a806de 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java @@ -165,8 +165,7 @@ public class AppInfo { this.amContainerLogsExist = true; this.amContainerLogs = WebAppUtils.getRunningLogURL( schemePrefix + masterContainer.getNodeHttpAddress(), - ConverterUtils.toString(masterContainer.getId()), - app.getUser()); + masterContainer.getId().toString(), app.getUser()); this.amHostHttpAddress = masterContainer.getNodeHttpAddress(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java index 5c69411b981..9eaf43185cd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java @@ -218,7 +218,7 @@ 
public class TestRMAdminService { fail("Should not get any exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); @@ -256,7 +256,7 @@ public class TestRMAdminService { fail("Should not get any exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java index 40811fe64ec..91dcff28ed4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java @@ -181,7 +181,7 @@ public class RMStateStoreTestBase { RMAppAttemptMetrics mockRmAppAttemptMetrics = mock(RMAppAttemptMetrics.class); Container container = new ContainerPBImpl(); - container.setId(ConverterUtils.toContainerId(containerIdStr)); + container.setId(ContainerId.fromString(containerIdStr)); RMAppAttempt mockAttempt = mock(RMAppAttempt.class); when(mockAttempt.getAppAttemptId()).thenReturn(attemptId); when(mockAttempt.getMasterContainer()).thenReturn(container); @@ -225,8 +225,8 @@ public class RMStateStoreTestBase { ClientToAMTokenSecretManagerInRM clientToAMTokenMgr = new ClientToAMTokenSecretManagerInRM(); - ApplicationAttemptId attemptId1 = ConverterUtils - .toApplicationAttemptId("appattempt_1352994193343_0001_000001"); + ApplicationAttemptId attemptId1 = ApplicationAttemptId.fromString( + "appattempt_1352994193343_0001_000001"); ApplicationId appId1 = attemptId1.getApplicationId(); storeApp(store, appId1, submitTime, startTime); verifier.afterStoreApp(store, appId1); @@ -242,8 +242,8 @@ public class RMStateStoreTestBase { appAttemptToken1, clientTokenKey1, dispatcher); String appAttemptIdStr2 = "appattempt_1352994193343_0001_000002"; - ApplicationAttemptId attemptId2 = - ConverterUtils.toApplicationAttemptId(appAttemptIdStr2); + ApplicationAttemptId attemptId2 = ApplicationAttemptId.fromString( + appAttemptIdStr2); // create application token and client token key for attempt2 Token appAttemptToken2 = @@ -255,8 +255,8 @@ public class RMStateStoreTestBase { "container_1352994193343_0001_02_000001", appAttemptToken2, clientTokenKey2, dispatcher); - ApplicationAttemptId attemptIdRemoved = ConverterUtils - .toApplicationAttemptId("appattempt_1352994193343_0002_000001"); + ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.fromString( + "appattempt_1352994193343_0002_000001"); ApplicationId appIdRemoved = attemptIdRemoved.getApplicationId(); storeApp(store, appIdRemoved, submitTime, startTime); storeAttempt(store, attemptIdRemoved, diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java index a2ff4b3c1cd..5c7299223d9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java @@ -167,7 +167,7 @@ public class TestFSRMStateStore extends RMStateStoreTestBase { (FileSystemRMStateStore) fsTester.getRMStateStore(); String appAttemptIdStr3 = "appattempt_1352994193343_0001_000003"; ApplicationAttemptId attemptId3 = - ConverterUtils.toApplicationAttemptId(appAttemptIdStr3); + ApplicationAttemptId.fromString(appAttemptIdStr3); Path appDir = fsTester.store.getAppDir(attemptId3.getApplicationId().toString()); Path tempAppAttemptFile = @@ -347,7 +347,7 @@ public class TestFSRMStateStore extends RMStateStoreTestBase { // imitate appAttemptFile1 is still .new, but old one is deleted String appAttemptIdStr1 = "appattempt_1352994193343_0001_000001"; ApplicationAttemptId attemptId1 = - ConverterUtils.toApplicationAttemptId(appAttemptIdStr1); + ApplicationAttemptId.fromString(appAttemptIdStr1); Path appDir = fsTester.store.getAppDir(attemptId1.getApplicationId().toString()); Path appAttemptFile1 = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java index 66b023cc85f..2208f63e659 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java @@ -34,6 +34,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.Container; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl; @@ -386,14 +387,14 @@ public class TestZKRMStateStore extends RMStateStoreTestBase { // Add a new attempt ClientToAMTokenSecretManagerInRM clientToAMTokenMgr = new ClientToAMTokenSecretManagerInRM(); - ApplicationAttemptId attemptId = ConverterUtils - .toApplicationAttemptId("appattempt_1234567894321_0001_000001"); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString( + "appattempt_1234567894321_0001_000001"); SecretKey clientTokenMasterKey = clientToAMTokenMgr.createMasterKey(attemptId); RMAppAttemptMetrics 
mockRmAppAttemptMetrics = mock(RMAppAttemptMetrics.class); Container container = new ContainerPBImpl(); - container.setId(ConverterUtils.toContainerId("container_1234567891234_0001_01_000001")); + container.setId(ContainerId.fromString("container_1234567891234_0001_01_000001")); RMAppAttempt mockAttempt = mock(RMAppAttempt.class); when(mockAttempt.getAppAttemptId()).thenReturn(attemptId); when(mockAttempt.getMasterContainer()).thenReturn(container); @@ -478,8 +479,8 @@ public class TestZKRMStateStore extends RMStateStoreTestBase { TestDispatcher dispatcher = new TestDispatcher(); store.setRMDispatcher(dispatcher); - ApplicationAttemptId attemptIdRemoved = ConverterUtils - .toApplicationAttemptId("appattempt_1352994193343_0002_000001"); + ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.fromString( + "appattempt_1352994193343_0002_000001"); ApplicationId appIdRemoved = attemptIdRemoved.getApplicationId(); storeApp(store, appIdRemoved, submitTime, startTime); storeAttempt(store, attemptIdRemoved, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java index f74164f8e31..73e9fc75748 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java @@ -18,32 +18,21 @@ package org.apache.hadoop.yarn.server.resourcemanager.webapp; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; - -import java.io.*; -import java.net.URI; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; - -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; - +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.google.inject.Singleton; +import com.google.inject.servlet.GuiceServletContextListener; +import com.google.inject.servlet.ServletModule; +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.ClientResponse; +import com.sun.jersey.api.client.ClientResponse.Status; +import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.config.DefaultClientConfig; +import com.sun.jersey.api.client.filter.LoggingFilter; +import com.sun.jersey.api.json.JSONJAXBContext; +import com.sun.jersey.api.json.JSONMarshaller; +import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; +import com.sun.jersey.test.framework.WebAppDescriptor; import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.io.Text; @@ -51,11 +40,13 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.api.records.QueueACL; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.MockNM; @@ -67,12 +58,12 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.Capacity import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairSchedulerConfiguration; +import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppPriority; +import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppQueue; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppState; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ApplicationSubmissionContextInfo; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CredentialsInfo; import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.LocalResourceInfo; -import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.*; -import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.JerseyTestBase; import org.apache.hadoop.yarn.webapp.WebServicesTestUtils; @@ -91,20 +82,36 @@ import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; -import com.google.inject.Guice; -import com.google.inject.Injector; -import com.google.inject.Singleton; -import com.google.inject.servlet.GuiceServletContextListener; -import com.google.inject.servlet.ServletModule; -import com.sun.jersey.api.client.Client; -import com.sun.jersey.api.client.ClientResponse; -import com.sun.jersey.api.client.ClientResponse.Status; -import com.sun.jersey.api.client.WebResource; -import com.sun.jersey.api.client.filter.LoggingFilter; -import com.sun.jersey.api.json.JSONJAXBContext; -import com.sun.jersey.api.json.JSONMarshaller; -import com.sun.jersey.guice.spi.container.servlet.GuiceContainer; -import com.sun.jersey.test.framework.WebAppDescriptor; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.MediaType; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.ByteArrayInputStream; +import java.io.DataInputStream; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringReader; +import java.io.StringWriter; +import java.net.URI; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import 
java.util.Enumeration; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeTrue; @RunWith(Parameterized.class) public class TestRMWebServicesAppsModification extends JerseyTestBase { @@ -807,7 +814,7 @@ public class TestRMWebServicesAppsModification extends JerseyTestBase { RMApp app = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appId)); + .get(ApplicationId.fromString(appId)); assertEquals(appName, app.getName()); assertEquals(webserviceUserName, app.getUser()); assertEquals(2, app.getMaxAppAttempts()); @@ -825,8 +832,7 @@ public class TestRMWebServicesAppsModification extends JerseyTestBase { Map appLRs = ctx.getLocalResources(); assertTrue(appLRs.containsKey(lrKey)); LocalResource exampleLR = appLRs.get(lrKey); - assertEquals(ConverterUtils.getYarnUrlFromURI(y.getUrl()), - exampleLR.getResource()); + assertEquals(URL.fromURI(y.getUrl()), exampleLR.getResource()); assertEquals(y.getSize(), exampleLR.getSize()); assertEquals(y.getTimestamp(), exampleLR.getTimestamp()); assertEquals(y.getType(), exampleLR.getType()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java index 36e24ecf189..4e26bd1ce6e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java @@ -50,6 +50,7 @@ import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; @@ -236,11 +237,11 @@ public class TestRMWebServicesDelegationTokenAuthentication { boolean appExists = rm.getRMContext().getRMApps() - .containsKey(ConverterUtils.toApplicationId(appid)); + .containsKey(ApplicationId.fromString(appid)); assertTrue(appExists); RMApp actualApp = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appid)); + .get(ApplicationId.fromString(appid)); String owner = actualApp.getUser(); assertEquals("client", owner); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java index 2f6a02287c4..249e8250497 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java @@ -37,6 +37,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.minikdc.MiniKdc; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.KerberosTestUtils; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; @@ -236,11 +237,11 @@ public class TestRMWebappAuthentication { assertEquals(Status.ACCEPTED.getStatusCode(), conn.getResponseCode()); boolean appExists = rm.getRMContext().getRMApps() - .containsKey(ConverterUtils.toApplicationId(appid)); + .containsKey(ApplicationId.fromString(appid)); assertTrue(appExists); RMApp actualApp = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appid)); + .get(ApplicationId.fromString(appid)); String owner = actualApp.getUser(); assertEquals( rm.getConfig().get(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, @@ -259,11 +260,11 @@ public class TestRMWebappAuthentication { conn.getInputStream(); appExists = rm.getRMContext().getRMApps() - .containsKey(ConverterUtils.toApplicationId(appid)); + .containsKey(ApplicationId.fromString(appid)); assertTrue(appExists); actualApp = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appid)); + .get(ApplicationId.fromString(appid)); owner = actualApp.getUser(); assertEquals("client", owner); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java index 231ca7241af..958b54e623b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java @@ -481,7 +481,7 @@ public class EntityGroupFSTimelineStore extends CompositeService ApplicationId appId = null; if (appIdStr.startsWith(ApplicationId.appIdStrPrefix)) { try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (IllegalArgumentException e) { appId = null; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java index db241a891d2..884b5cd18aa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java @@ -34,15 +34,16 @@ class EntityGroupPlugInForTest extends TimelineEntityGroupPlugin { public Set getTimelineEntityGroupId(String entityType, NameValuePair primaryFilter, Collection secondaryFilters) { - ApplicationId appId - = ConverterUtils.toApplicationId(primaryFilter.getValue().toString()); + ApplicationId appId = ApplicationId.fromString( + primaryFilter.getValue().toString()); return Sets.newHashSet(getStandardTimelineGroupId(appId)); } @Override public Set getTimelineEntityGroupId(String entityId, String entityType) { - ApplicationId appId = ConverterUtils.toApplicationId(entityId); + ApplicationId appId = ApplicationId.fromString( + entityId); return Sets.newHashSet(getStandardTimelineGroupId(appId)); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java index d6baab67370..1c12f36192b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java @@ -68,7 +68,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils { private static final String SAMPLE_APP_PREFIX_CACHE_TEST = "1234_000"; private static final int CACHE_TEST_CACHE_SIZE = 5; - + private static final String TEST_SUMMARY_LOG_FILE_NAME = EntityGroupFSTimelineStore.SUMMARY_LOG_PREFIX + "test"; private static final String TEST_DOMAIN_LOG_FILE_NAME @@ -117,7 +117,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils { sampleAppIds = new ArrayList<>(CACHE_TEST_CACHE_SIZE + 1); for (int i = 0; i < CACHE_TEST_CACHE_SIZE + 1; i++) { - ApplicationId appId = ConverterUtils.toApplicationId( + ApplicationId appId = ApplicationId.fromString( ConverterUtils.APPLICATION_PREFIX + "_" + SAMPLE_APP_PREFIX_CACHE_TEST + i); sampleAppIds.add(appId);
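The resource-manager state-store and timeline-store changes lean on the same factories for application and attempt identifiers: ApplicationId.fromString and ApplicationAttemptId.fromString replace the corresponding ConverterUtils.toApplicationId and toApplicationAttemptId calls. A short sketch of how the two parse methods relate, reusing the literal attempt id that appears in the state-store tests:

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;

public class AppIdParsing {
  public static void main(String[] args) {
    // Parse the attempt id string used in RMStateStoreTestBase.
    ApplicationAttemptId attemptId =
        ApplicationAttemptId.fromString("appattempt_1352994193343_0001_000001");

    // The attempt carries its application id, so parsing either form stays consistent.
    ApplicationId appId = ApplicationId.fromString("application_1352994193343_0001");
    if (!appId.equals(attemptId.getApplicationId())) {
      throw new AssertionError("attempt and application ids disagree");
    }
  }
}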