From 863bfa4d6a6c9d9c940923f5eb300c7df76648ad Mon Sep 17 00:00:00 2001 From: Wangda Tan Date: Tue, 14 Jun 2016 15:12:00 -0700 Subject: [PATCH] YARN-1942. Deprecate toString/fromString methods from ConverterUtils and move them to records classes like ContainerId/ApplicationId, etc. (wangda) --- .../org/apache/hadoop/mapred/YarnChild.java | 10 +- .../hadoop/mapreduce/v2/app/MRAppMaster.java | 2 +- .../v2/app/job/impl/TaskAttemptImpl.java | 7 +- .../v2/app/webapp/dao/TaskAttemptInfo.java | 4 +- .../apache/hadoop/mapreduce/v2/app/MRApp.java | 2 +- .../mapreduce/v2/app/TestMRAppMaster.java | 66 +++--- .../app/commit/TestCommitterEventHandler.java | 12 +- .../v2/app/job/impl/TestJobImpl.java | 4 +- .../app/webapp/TestAMWebServicesAttempts.java | 2 +- .../mapred/LocalDistributedCacheManager.java | 2 +- .../hadoop/mapreduce/v2/util/MRApps.java | 8 +- .../mapreduce/jobhistory/AMStartedEvent.java | 6 +- .../jobhistory/TaskAttemptStartedEvent.java | 4 +- .../hs/webapp/TestHsWebServicesAttempts.java | 2 +- .../org/apache/hadoop/mapred/YARNRunner.java | 5 +- .../hadoop/mapreduce/v2/TestMRJobs.java | 2 +- .../apache/hadoop/mapred/ShuffleHandler.java | 2 +- .../hadoop/tools/HadoopArchiveLogs.java | 3 +- .../api/records/ApplicationAttemptId.java | 36 ++- .../yarn/api/records/ApplicationId.java | 38 +++- .../hadoop/yarn/api/records/ContainerId.java | 6 +- .../hadoop/yarn/api/records/NodeId.java | 23 +- .../apache/hadoop/yarn/api/records/URL.java | 49 +++++ .../distributedshell/ApplicationMaster.java | 7 +- .../applications/distributedshell/Client.java | 3 +- .../DistributedShellTimelinePlugin.java | 4 +- .../TestDistributedShell.java | 4 +- .../yarn/client/cli/ApplicationCLI.java | 27 ++- .../hadoop/yarn/client/cli/LogsCLI.java | 6 +- .../hadoop/yarn/client/cli/NodeCLI.java | 2 +- .../hadoop/yarn/client/cli/RMAdminCLI.java | 2 +- .../yarn/client/cli/TestRMAdminCLI.java | 2 +- .../AggregatedLogDeletionService.java | 2 +- .../logaggregation/AggregatedLogFormat.java | 11 +- .../yarn/logaggregation/LogCLIHelpers.java | 2 +- .../hadoop/yarn/util/ConverterUtils.java | 206 ++++++------------ .../apache/hadoop/yarn/util/FSDownload.java | 2 +- .../yarn/webapp/log/AggregatedLogsBlock.java | 4 +- .../hadoop/yarn/webapp/util/WebAppUtils.java | 2 +- .../hadoop/yarn/util/TestConverterUtils.java | 30 +-- .../hadoop/yarn/util/TestFSDownload.java | 13 +- ...licationHistoryManagerOnTimelineStore.java | 25 +-- .../FileSystemApplicationHistoryStore.java | 8 +- .../yarn/server/utils/BuilderUtils.java | 2 +- .../yarn/server/webapp/AppAttemptBlock.java | 2 +- .../yarn/server/webapp/ContainerBlock.java | 2 +- .../yarn/server/webapp/WebServices.java | 6 +- .../nodemanager/DefaultContainerExecutor.java | 5 +- .../nodemanager/DockerContainerExecutor.java | 6 +- .../nodemanager/LinuxContainerExecutor.java | 2 +- .../container/ContainerImpl.java | 2 +- .../launcher/ContainerLaunch.java | 10 +- .../launcher/ContainerRelaunch.java | 2 +- .../launcher/RecoveredContainerLaunch.java | 6 +- .../localizer/ContainerLocalizer.java | 3 +- .../localizer/LocalResourceRequest.java | 4 +- .../ResourceLocalizationService.java | 28 +-- .../event/LocalizerResourceRequestEvent.java | 2 +- .../sharedcache/SharedCacheUploader.java | 2 +- .../logaggregation/AppLogAggregatorImpl.java | 2 +- .../recovery/NMLeveldbStateStoreService.java | 10 +- .../util/NodeManagerBuilderUtils.java | 2 +- .../nodemanager/util/ProcessIdFileReader.java | 3 +- .../nodemanager/webapp/ApplicationPage.java | 5 +- .../nodemanager/webapp/ContainerLogsPage.java | 2 +- 
.../webapp/ContainerLogsUtils.java | 6 +- .../nodemanager/webapp/ContainerPage.java | 2 +- .../nodemanager/webapp/NMWebServices.java | 4 +- .../nodemanager/webapp/dao/AppInfo.java | 4 +- .../nodemanager/TestNodeManagerReboot.java | 2 +- .../nodemanager/TestNodeManagerResync.java | 2 +- .../nodemanager/TestNodeManagerShutdown.java | 2 +- .../impl/pb/TestPBRecordImpl.java | 10 +- .../TestContainerManager.java | 22 +- .../TestContainerManagerRecovery.java | 2 +- .../launcher/TestContainerLaunch.java | 8 +- .../localizer/TestContainerLocalizer.java | 2 +- .../localizer/TestLocalResource.java | 7 +- .../TestResourceLocalizationService.java | 21 +- .../TestAppLogAggregatorImpl.java | 14 +- .../TestLogAggregationService.java | 36 +-- .../monitor/TestContainersMonitor.java | 2 +- .../TestNMLeveldbStateStoreService.java | 24 +- .../nodemanager/webapp/TestNMWebServer.java | 2 +- .../webapp/TestNMWebServicesContainers.java | 7 +- .../resourcemanager/ResourceManager.java | 2 +- .../recovery/LeveldbRMStateStore.java | 5 +- .../recovery/ZKRMStateStore.java | 2 +- .../DynamicResourceConfiguration.java | 2 +- .../rmcontainer/RMContainerImpl.java | 2 +- .../resourcemanager/webapp/RMAppsBlock.java | 4 +- .../webapp/RMWebAppFilter.java | 2 +- .../resourcemanager/webapp/RMWebServices.java | 9 +- .../webapp/dao/AppAttemptInfo.java | 2 +- .../resourcemanager/webapp/dao/AppInfo.java | 3 +- .../resourcemanager/TestRMAdminService.java | 8 +- .../recovery/RMStateStoreTestBase.java | 14 +- .../recovery/TestFSRMStateStore.java | 4 +- .../recovery/TestZKRMStateStore.java | 11 +- .../TestRMWebServicesAppsModification.java | 7 +- ...ServicesDelegationTokenAuthentication.java | 5 +- .../webapp/TestRMWebappAuthentication.java | 9 +- .../timeline/EntityGroupFSTimelineStore.java | 2 +- .../timeline/EntityGroupPlugInForTest.java | 7 +- .../TestEntityGroupFSTimelineStore.java | 4 +- 105 files changed, 561 insertions(+), 487 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java index ec7ade7daa5..164f19dc2f1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java @@ -58,6 +58,7 @@ import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.util.ConverterUtils; /** @@ -290,11 +291,10 @@ private static void configureLocalDirs(Task task, JobConf job) throws IOExceptio private static void configureTask(JobConf job, Task task, Credentials credentials, Token jt) throws IOException { job.setCredentials(credentials); - - ApplicationAttemptId appAttemptId = - ConverterUtils.toContainerId( - System.getenv(Environment.CONTAINER_ID.name())) - .getApplicationAttemptId(); + + ApplicationAttemptId appAttemptId = ContainerId.fromString( + System.getenv(Environment.CONTAINER_ID.name())) + .getApplicationAttemptId(); LOG.debug("APPLICATION_ATTEMPT_ID: " + appAttemptId); // Set it in conf, so as to be able to be used the the OutputCommitter. 
job.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID, diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java index cbd41a836f0..5e597bb33ad 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java @@ -1544,7 +1544,7 @@ public static void main(String[] args) { validateInputParam(appSubmitTimeStr, ApplicationConstants.APP_SUBMIT_TIME_ENV); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); ApplicationAttemptId applicationAttemptId = containerId.getApplicationAttemptId(); if (applicationAttemptId != null) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java index fec035421d4..014557dc8bc 100755 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java @@ -721,8 +721,7 @@ private static LocalResource createLocalResource(FileSystem fc, Path file, LocalResourceType type, LocalResourceVisibility visibility) throws IOException { FileStatus fstat = fc.getFileStatus(file); - URL resourceURL = ConverterUtils.getYarnUrlFromPath(fc.resolvePath(fstat - .getPath())); + URL resourceURL = URL.fromPath(fc.resolvePath(fstat.getPath())); long resourceSize = fstat.getLen(); long resourceModificationTime = fstat.getModificationTime(); @@ -1263,8 +1262,8 @@ public void setAvataar(Avataar avataar) { public TaskAttemptStateInternal recover(TaskAttemptInfo taInfo, OutputCommitter committer, boolean recoverOutput) { ContainerId containerId = taInfo.getContainerId(); - NodeId containerNodeId = ConverterUtils.toNodeId(taInfo.getHostname() + ":" - + taInfo.getPort()); + NodeId containerNodeId = NodeId.fromString( + taInfo.getHostname() + ":" + taInfo.getPort()); String nodeHttpAddress = StringInterner.weakIntern(taInfo.getHostname() + ":" + taInfo.getHttpPort()); // Resource/Priority/Tokens are only needed while launching the container on diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java index d8e89b1cbc9..892c6269619 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java @@ -69,8 +69,10 @@ public TaskAttemptInfo(TaskAttempt ta, TaskType type, Boolean isRunning) { this.nodeHttpAddress = ta.getNodeHttpAddress(); this.startTime = report.getStartTime(); this.finishTime = report.getFinishTime(); - this.assignedContainerId = ConverterUtils.toString(report.getContainerId()); this.assignedContainer = report.getContainerId(); + if (assignedContainer != null) { + this.assignedContainerId = assignedContainer.toString(); + } this.progress = report.getProgress() * 100; this.status = report.getStateString(); this.state = report.getTaskAttemptState(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java index e60d904d5cc..6ff243de724 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java @@ -253,7 +253,7 @@ public MRApp(ApplicationAttemptId appAttemptId, ContainerId amContainerId, // the job can reaches the final state when MRAppMaster shuts down. this.successfullyUnregistered.set(unregistered); this.assignedQueue = assignedQueue; - this.resource = Resource.newInstance(1234, 2); + this.resource = Resource.newInstance(1234L, 2L); } @Override diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java index 78a6178bc6b..1ea290a72c4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java @@ -114,7 +114,7 @@ public static void setup() throws AccessControlException, localFS.delete(testDir, true); new File(testDir.toString()).mkdir(); } - + @Before public void prepare() throws IOException { File dir = new File(stagingDir); @@ -134,11 +134,11 @@ public void testMRAppMasterForDifferentUser() throws IOException, InterruptedException { String applicationAttemptIdStr = "appattempt_1317529182569_0004_000001"; String containerIdStr = "container_1317529182569_0004_000001_1"; - + String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMasterTest appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis()); @@ -159,15 +159,15 @@ public void testMRAppMasterMidLock() throws IOException, String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); 
conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); FileSystem fs = FileSystem.get(conf); //Create the file, but no end file so we should unregister with an error. fs.create(start).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -198,8 +198,8 @@ public void testMRAppMasterJobLaunchTime() throws IOException, conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); conf.setInt(MRJobConfig.NUM_REDUCES, 0); conf.set(JHAdminConfig.MR_HS_JHIST_FORMAT, "json"); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); @@ -217,7 +217,7 @@ public void testMRAppMasterJobLaunchTime() throws IOException, FileSystem fs = FileSystem.get(conf); JobSplitWriter.createSplitFiles(new Path(dir.getAbsolutePath()), conf, fs, new org.apache.hadoop.mapred.InputSplit[0]); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMasterTestLaunchTime appMaster = new MRAppMasterTestLaunchTime(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis()); @@ -235,8 +235,8 @@ public void testMRAppMasterSuccessLock() throws IOException, String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); @@ -244,7 +244,7 @@ public void testMRAppMasterSuccessLock() throws IOException, FileSystem fs = FileSystem.get(conf); fs.create(start).close(); fs.create(end).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -264,7 +264,7 @@ public void testMRAppMasterSuccessLock() throws IOException, // verify the final status is SUCCEEDED verifyFailedStatus((MRAppMasterTest)appMaster, "SUCCEEDED"); } - + @Test public void testMRAppMasterFailLock() throws IOException, InterruptedException { @@ -273,8 +273,8 @@ public void testMRAppMasterFailLock() throws IOException, String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - 
.toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(applicationAttemptId.getApplicationId())); Path start = MRApps.getStartJobCommitFile(conf, userName, jobId); @@ -282,7 +282,7 @@ public void testMRAppMasterFailLock() throws IOException, FileSystem fs = FileSystem.get(conf); fs.create(start).close(); fs.create(end).close(); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -302,7 +302,7 @@ public void testMRAppMasterFailLock() throws IOException, // verify the final status is FAILED verifyFailedStatus((MRAppMasterTest)appMaster, "FAILED"); } - + @Test public void testMRAppMasterMissingStaging() throws IOException, InterruptedException { @@ -311,16 +311,16 @@ public void testMRAppMasterMissingStaging() throws IOException, String userName = "TestAppMasterUser"; JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); //Delete the staging directory File dir = new File(stagingDir); if(dir.exists()) { FileUtils.deleteDirectory(dir); } - - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + + ContainerId containerId = ContainerId.fromString(containerIdStr); MRAppMaster appMaster = new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1, System.currentTimeMillis(), false, false); @@ -351,9 +351,9 @@ public void testMRAppMasterMaxAppAttempts() throws IOException, String containerIdStr = "container_1317529182569_0004_000002_1"; String userName = "TestAppMasterUser"; - ApplicationAttemptId applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); @@ -425,7 +425,7 @@ public void testMRAppMasterCredentials() throws Exception { new Token(identifier, password, AMRMTokenIdentifier.KIND_NAME, appTokenService); credentials.addToken(appTokenService, appToken); - + Text keyAlias = new Text("mySecretKeyAlias"); credentials.addSecretKey(keyAlias, "mySecretKey".getBytes()); Token storedToken = @@ -486,7 +486,7 @@ public void testMRAppMasterCredentials() throws Exception { Assert.assertEquals(storedToken, confCredentials.getToken(tokenAlias)); Assert.assertEquals("mySecretKey", new String(confCredentials.getSecretKey(keyAlias))); - + // Verify the AM's ugi - app token should be present Credentials ugiCredentials = appMaster.getUgi().getCredentials(); Assert.assertEquals(1, ugiCredentials.numberOfSecretKeys()); @@ -505,9 +505,9 @@ public void testMRAppMasterShutDownJob() throws Exception, String applicationAttemptIdStr = "appattempt_1317529182569_0004_000002"; String containerIdStr = "container_1317529182569_0004_000002_1"; String userName = "TestAppMasterUser"; - ApplicationAttemptId 
applicationAttemptId = ConverterUtils - .toApplicationAttemptId(applicationAttemptIdStr); - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.fromString( + applicationAttemptIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); JobConf conf = new JobConf(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); @@ -589,7 +589,7 @@ protected void serviceInit(Configuration conf) throws Exception { } this.conf = conf; } - + @Override protected ContainerAllocator createContainerAllocator( final ClientService clientService, final AppContext context) { @@ -626,7 +626,7 @@ protected void serviceStart() throws Exception { public Credentials getCredentials() { return super.getCredentials(); } - + public UserGroupInformation getUgi() { return currentUser; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java index a4853d5e428..b099bcce23c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java @@ -129,8 +129,8 @@ public void testCommitWindow() throws Exception { SystemClock clock = SystemClock.getInstance(); AppContext appContext = mock(AppContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); when(appContext.getApplicationID()).thenReturn(attemptid.getApplicationId()); when(appContext.getApplicationAttemptId()).thenReturn(attemptid); when(appContext.getEventHandler()).thenReturn( @@ -240,8 +240,8 @@ public void testBasic() throws Exception { YarnConfiguration conf = new YarnConfiguration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); JobContext mockJobContext = mock(JobContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(attemptid.getApplicationId())); @@ -288,8 +288,8 @@ public void testFailure() throws Exception { YarnConfiguration conf = new YarnConfiguration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir); JobContext mockJobContext = mock(JobContext.class); - ApplicationAttemptId attemptid = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId attemptid = + ApplicationAttemptId.fromString("appattempt_1234567890000_0001_0"); JobId jobId = TypeConverter.toYarn( TypeConverter.fromYarn(attemptid.getApplicationId())); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java index 
36221e0500d..eaa5af76d5b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java @@ -942,8 +942,8 @@ public void runOnNextHeartbeat(Runnable callback) { callback.run(); } }; - ApplicationAttemptId id = - ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0"); + ApplicationAttemptId id = ApplicationAttemptId.fromString( + "appattempt_1234567890000_0001_0"); when(appContext.getApplicationID()).thenReturn(id.getApplicationId()); when(appContext.getApplicationAttemptId()).thenReturn(id); CommitterEventHandler handler = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java index dcd5d2954ba..3c9127fd116 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java @@ -515,7 +515,7 @@ public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype, WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag, diagnostics); WebServicesTestUtils.checkStringMatch("assignedContainerId", - ConverterUtils.toString(ta.getAssignedContainerID()), + ta.getAssignedContainerID().toString(), assignedContainerId); assertEquals("startTime wrong", ta.getLaunchTime(), startTime); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java index 3b87197e1b9..c58a774f87b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalDistributedCacheManager.java @@ -157,7 +157,7 @@ public void setup(JobConf conf) throws IOException { } Path resourcePath; try { - resourcePath = ConverterUtils.getPathFromYarnURL(resource.getResource()); + resourcePath = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException(e); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java index feea789efcc..8ca1a9d3f9a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java @@ -68,6 +68,7 @@ 
import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.util.Apps; @@ -608,8 +609,7 @@ private static void parseDistributedCacheArtifacts( } String linkName = name.toUri().getPath(); LocalResource orig = localResources.get(linkName); - org.apache.hadoop.yarn.api.records.URL url = - ConverterUtils.getYarnUrlFromURI(p.toUri()); + URL url = URL.fromURI(p.toUri()); if(orig != null && !orig.getResource().equals(url)) { LOG.warn( getResourceDescription(orig.getType()) + @@ -618,8 +618,8 @@ private static void parseDistributedCacheArtifacts( " This will be an error in Hadoop 2.0"); continue; } - localResources.put(linkName, LocalResource.newInstance(ConverterUtils - .getYarnUrlFromURI(p.toUri()), type, visibilities[i] + localResources.put(linkName, LocalResource + .newInstance(URL.fromURI(p.toUri()), type, visibilities[i] ? LocalResourceVisibility.PUBLIC : LocalResourceVisibility.PRIVATE, sizes[i], timestamps[i])); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java index ea2ca9e90fd..266aa94f0cb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java @@ -107,8 +107,8 @@ public void setDatum(Object datum) { * @return the ApplicationAttemptId */ public ApplicationAttemptId getAppAttemptId() { - return ConverterUtils.toApplicationAttemptId(datum.getApplicationAttemptId() - .toString()); + return ApplicationAttemptId.fromString( + datum.getApplicationAttemptId().toString()); } /** @@ -122,7 +122,7 @@ public long getStartTime() { * @return the ContainerId for the MRAppMaster. 
*/ public ContainerId getContainerId() { - return ConverterUtils.toContainerId(datum.getContainerId().toString()); + return ContainerId.fromString(datum.getContainerId().toString()); } /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java index c8c250a6078..3073d5b95f1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java @@ -75,7 +75,7 @@ public TaskAttemptStartedEvent(TaskAttemptID attemptId, TaskType taskType, long startTime, String trackerName, int httpPort, int shufflePort, String locality, String avataar) { this(attemptId, taskType, startTime, trackerName, httpPort, shufflePort, - ConverterUtils.toContainerId("container_-1_-1_-1_-1"), locality, + ContainerId.fromString("container_-1_-1_-1_-1"), locality, avataar); } @@ -116,7 +116,7 @@ public EventType getEventType() { } /** Get the ContainerId */ public ContainerId getContainerId() { - return ConverterUtils.toContainerId(datum.getContainerId().toString()); + return ContainerId.fromString(datum.getContainerId().toString()); } /** Get the locality */ public String getLocality() { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java index 60dc235d684..54c2792b12b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java @@ -533,7 +533,7 @@ public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype, WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag, diagnostics); WebServicesTestUtils.checkStringMatch("assignedContainerId", - ConverterUtils.toString(ta.getAssignedContainerID()), + ta.getAssignedContainerID().toString(), assignedContainerId); assertEquals("startTime wrong", ta.getLaunchTime(), startTime); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java index 1342282784f..b30641ebb22 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java @@ -321,7 +321,7 @@ private LocalResource createApplicationResource(FileContext fs, Path p, LocalRes throws IOException { LocalResource rsrc = 
recordFactory.newRecordInstance(LocalResource.class); FileStatus rsrcStat = fs.getFileStatus(p); - rsrc.setResource(ConverterUtils.getYarnUrlFromPath(fs + rsrc.setResource(URL.fromPath(fs .getDefaultFileSystem().resolvePath(rsrcStat.getPath()))); rsrc.setSize(rsrcStat.getLen()); rsrc.setTimestamp(rsrcStat.getModificationTime()); @@ -355,8 +355,7 @@ public ApplicationSubmissionContext createApplicationSubmissionContext( Path jobConfPath = new Path(jobSubmitDir, MRJobConfig.JOB_CONF_FILE); - URL yarnUrlForJobSubmitDir = ConverterUtils - .getYarnUrlFromPath(defaultFileContext.getDefaultFileSystem() + URL yarnUrlForJobSubmitDir = URL.fromPath(defaultFileContext.getDefaultFileSystem() .resolvePath( defaultFileContext.makeQualified(new Path(jobSubmitDir)))); LOG.debug("Creating setup context, jobSubmitDir url is " diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java index a6647f19daa..900bdeb20ee 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java @@ -749,7 +749,7 @@ public void testContainerRollingLog() throws IOException, boolean foundAppMaster = job.isUber(); final Path containerPathComponent = slog.getPath().getParent(); if (!foundAppMaster) { - final ContainerId cid = ConverterUtils.toContainerId( + final ContainerId cid = ContainerId.fromString( containerPathComponent.getName()); foundAppMaster = ((cid.getContainerId() & ContainerId.CONTAINER_ID_BITMASK)== 1); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java index 0d6e900b393..8cbae819958 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java @@ -999,7 +999,7 @@ private String getBaseLocation(String jobId, String user) { final String baseStr = ContainerLocalizer.USERCACHE + "/" + user + "/" + ContainerLocalizer.APPCACHE + "/" - + ConverterUtils.toString(appID) + "/output" + "/"; + + appID.toString() + "/output" + "/"; return baseStr; } diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java index 6b8af97e9c4..2d3e43b1ff6 100644 --- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java +++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java @@ -39,6 +39,7 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import 
org.apache.hadoop.yarn.api.records.LogAggregationStatus; import org.apache.hadoop.yarn.applications.distributedshell.ApplicationMaster; @@ -302,7 +303,7 @@ void filterAppsByAggregatedStatus() throws IOException, YarnException { AppInfo app = it.next(); try { ApplicationReport report = client.getApplicationReport( - ConverterUtils.toApplicationId(app.getAppId())); + ApplicationId.fromString(app.getAppId())); LogAggregationStatus aggStatus = report.getLogAggregationStatus(); if (aggStatus.equals(LogAggregationStatus.RUNNING) || aggStatus.equals(LogAggregationStatus.RUNNING_WITH_FAILURE) || diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java index 0a83bc047aa..5f3a68ebe1a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java @@ -19,6 +19,8 @@ package org.apache.hadoop.yarn.api.records; import java.text.NumberFormat; +import java.util.Iterator; +import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -26,6 +28,8 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.util.Records; +import com.google.common.base.Splitter; + /** *

ApplicationAttemptId denotes the particular attempt * of an ApplicationMaster for a given {@link ApplicationId}.

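For reviewers: the hunks below add fromString() parsers directly on the id record classes (ApplicationAttemptId and ApplicationId here; NodeId further down, while ContainerId.fromString already existed and is only promoted from @Unstable to @Stable). A minimal, self-contained usage sketch of the new entry points follows; the driver class is hypothetical, and the id strings are illustrative (the attempt and container ids mirror values used in the tests touched by this patch):

    import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.ContainerId;
    import org.apache.hadoop.yarn.api.records.NodeId;

    public class RecordFromStringExample {
      public static void main(String[] args) {
        // Canonical string forms are the same ones toString() produces.
        ApplicationId appId =
            ApplicationId.fromString("application_1317529182569_0004");
        ApplicationAttemptId attemptId =
            ApplicationAttemptId.fromString("appattempt_1317529182569_0004_000001");
        ContainerId containerId =
            ContainerId.fromString("container_1317529182569_0004_000001_1");
        NodeId nodeId = NodeId.fromString("localhost:1234");

        // Ids round-trip through toString()/fromString(), so callers of the
        // deprecated ConverterUtils.toString(id) can simply use id.toString().
        System.out.println(appId.equals(ApplicationId.fromString(appId.toString())));
        System.out.println(containerId.getApplicationAttemptId().equals(attemptId));
        System.out.println(nodeId.getHost() + ":" + nodeId.getPort());
      }
    }

As the implementations below show, a malformed string raises IllegalArgumentException (wrapping NumberFormatException or NoSuchElementException) rather than going through the old ConverterUtils helpers.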
@@ -38,10 +42,11 @@ @Stable public abstract class ApplicationAttemptId implements Comparable { + private static Splitter _spliter = Splitter.on('_').trimResults(); @Private @Unstable - public static final String appAttemptIdStrPrefix = "appattempt_"; + public static final String appAttemptIdStrPrefix = "appattempt"; @Public @Unstable @@ -131,6 +136,7 @@ public int compareTo(ApplicationAttemptId other) { @Override public String toString() { StringBuilder sb = new StringBuilder(appAttemptIdStrPrefix); + sb.append("_"); sb.append(this.getApplicationId().getClusterTimestamp()).append("_"); sb.append(ApplicationId.appIdFormat.get().format( this.getApplicationId().getId())); @@ -139,4 +145,32 @@ public String toString() { } protected abstract void build(); + + @Public + @Stable + public static ApplicationAttemptId fromString(String applicationAttemptIdStr) { + Iterator it = _spliter.split(applicationAttemptIdStr).iterator(); + if (!it.next().equals(appAttemptIdStrPrefix)) { + throw new IllegalArgumentException("Invalid AppAttemptId prefix: " + + applicationAttemptIdStr); + } + try { + return toApplicationAttemptId(it); + } catch (NumberFormatException n) { + throw new IllegalArgumentException("Invalid AppAttemptId: " + + applicationAttemptIdStr, n); + } catch (NoSuchElementException e) { + throw new IllegalArgumentException("Invalid AppAttemptId: " + + applicationAttemptIdStr, e); + } + } + + private static ApplicationAttemptId toApplicationAttemptId( + Iterator it) throws NumberFormatException { + ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), + Integer.parseInt(it.next())); + ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(appId, Integer.parseInt(it.next())); + return appAttemptId; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java index 90214cd8fca..03a77ce309f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java @@ -19,6 +19,8 @@ package org.apache.hadoop.yarn.api.records; import java.text.NumberFormat; +import java.util.Iterator; +import java.util.NoSuchElementException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -26,6 +28,8 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.util.Records; +import com.google.common.base.Splitter; + /** *

ApplicationId represents the globally unique * identifier for an application.

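Besides the id parsers, this patch moves the URL/Path conversions onto the URL record itself (diffed further below): URL.fromPath(Path), URL.fromURI(URI) and url.toPath() replace ConverterUtils.getYarnUrlFromPath, getYarnUrlFromURI and getPathFromYarnURL. A small sketch of the new flow when building a LocalResource; the file path, size and timestamp are placeholder values and the driver class is hypothetical:

    import java.net.URISyntaxException;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.yarn.api.records.LocalResource;
    import org.apache.hadoop.yarn.api.records.LocalResourceType;
    import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
    import org.apache.hadoop.yarn.api.records.URL;

    public class UrlPathConversionExample {
      public static void main(String[] args) throws URISyntaxException {
        // Path -> URL, where callers previously used ConverterUtils.getYarnUrlFromPath().
        Path jar = new Path("hdfs://nn.example.com:8020/apps/demo/app.jar");
        URL resource = URL.fromPath(jar);

        LocalResource lr = LocalResource.newInstance(resource,
            LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
            1024L, 1234567890L);

        // URL -> Path, where callers previously used ConverterUtils.getPathFromYarnURL().
        Path roundTripped = lr.getResource().toPath();
        System.out.println(roundTripped);
      }
    }

Note that toPath() declares URISyntaxException, so call sites such as LocalDistributedCacheManager keep their existing try/catch and rethrow as IOException.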
@@ -38,10 +42,11 @@ @Public @Stable public abstract class ApplicationId implements Comparable { + private static Splitter _spliter = Splitter.on('_').trimResults(); @Private @Unstable - public static final String appIdStrPrefix = "application_"; + public static final String appIdStrPrefix = "application"; @Public @Unstable @@ -105,8 +110,35 @@ public int compareTo(ApplicationId other) { @Override public String toString() { - return appIdStrPrefix + this.getClusterTimestamp() + "_" - + appIdFormat.get().format(getId()); + return appIdStrPrefix + "_" + this.getClusterTimestamp() + "_" + appIdFormat + .get().format(getId()); + } + + private static ApplicationId toApplicationId( + Iterator it) throws NumberFormatException { + ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), + Integer.parseInt(it.next())); + return appId; + } + + @Public + @Stable + public static ApplicationId fromString(String appIdStr) { + Iterator it = _spliter.split((appIdStr)).iterator(); + if (!it.next().equals(appIdStrPrefix)) { + throw new IllegalArgumentException("Invalid ApplicationId prefix: " + + appIdStr + ". The valid ApplicationId should start with prefix " + + appIdStrPrefix); + } + try { + return toApplicationId(it); + } catch (NumberFormatException n) { + throw new IllegalArgumentException("Invalid ApplicationId: " + + appIdStr, n); + } catch (NoSuchElementException e) { + throw new IllegalArgumentException("Invalid ApplicationId: " + + appIdStr, e); + } } @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java index f332651daf2..feddeca9e70 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java @@ -42,7 +42,7 @@ public abstract class ContainerId implements Comparable{ private static final String CONTAINER_PREFIX = "container"; private static final String EPOCH_PREFIX = "e"; - @Private + @Public @Unstable public static ContainerId newContainerId(ApplicationAttemptId appAttemptId, long containerId) { @@ -97,7 +97,7 @@ public static ContainerId newInstance(ApplicationAttemptId appAttemptId, */ @Public @Deprecated - @Stable + @Unstable public abstract int getId(); /** @@ -205,7 +205,7 @@ public String toString() { } @Public - @Unstable + @Stable public static ContainerId fromString(String containerIdStr) { Iterator it = _SPLITTER.split(containerIdStr).iterator(); if (!it.next().equals(CONTAINER_PREFIX)) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java index c3f859598f3..a0b87a7be62 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java @@ -20,8 +20,8 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; -import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import 
org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.yarn.util.Records; /** @@ -35,8 +35,8 @@ @Stable public abstract class NodeId implements Comparable { - @Private - @Unstable + @Public + @Stable public static NodeId newInstance(String host, int port) { NodeId nodeId = Records.newRecord(NodeId.class); nodeId.setHost(host); @@ -112,6 +112,23 @@ public int compareTo(NodeId other) { } return hostCompare; } + + @Public + @Stable + public static NodeId fromString(String nodeIdStr) { + String[] parts = nodeIdStr.split(":"); + if (parts.length != 2) { + throw new IllegalArgumentException("Invalid NodeId [" + nodeIdStr + + "]. Expected host:port"); + } + try { + NodeId nodeId = + NodeId.newInstance(parts[0].trim(), Integer.parseInt(parts[1])); + return nodeId; + } catch (NumberFormatException e) { + throw new IllegalArgumentException("Invalid port: " + parts[1], e); + } + } protected abstract void build(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java index 4261117b108..aa28585ab17 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/URL.java @@ -18,8 +18,13 @@ package org.apache.hadoop.yarn.api.records; +import java.net.URI; +import java.net.URISyntaxException; + import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Stable; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.util.Records; /** @@ -119,4 +124,48 @@ public static URL newInstance(String scheme, String host, int port, String file) @Public @Stable public abstract void setFile(String file); + + @Public + @Stable + public Path toPath() throws URISyntaxException { + String scheme = getScheme() == null ? 
"" : getScheme(); + + String authority = ""; + if (getHost() != null) { + authority = getHost(); + if (getUserInfo() != null) { + authority = getUserInfo() + "@" + authority; + } + if (getPort() > 0) { + authority += ":" + getPort(); + } + } + + return new Path( + (new URI(scheme, authority, getFile(), null, null)).normalize()); + } + + @Public + @Stable + public static URL fromURI(URI uri) { + URL url = + RecordFactoryProvider.getRecordFactory(null).newRecordInstance( + URL.class); + if (uri.getHost() != null) { + url.setHost(uri.getHost()); + } + if (uri.getUserInfo() != null) { + url.setUserInfo(uri.getUserInfo()); + } + url.setPort(uri.getPort()); + url.setScheme(uri.getScheme()); + url.setFile(uri.getPath()); + return url; + } + + @Public + @Stable + public static URL fromPath(Path path) { + return fromURI(path.toUri()); + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java index 5e2c90b4291..703595c9a5c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java @@ -435,13 +435,13 @@ public boolean init(String[] args) throws ParseException, IOException { if (!envs.containsKey(Environment.CONTAINER_ID.name())) { if (cliParser.hasOption("app_attempt_id")) { String appIdStr = cliParser.getOptionValue("app_attempt_id", ""); - appAttemptID = ConverterUtils.toApplicationAttemptId(appIdStr); + appAttemptID = ApplicationAttemptId.fromString(appIdStr); } else { throw new IllegalArgumentException( "Application Attempt Id not set in the environment"); } } else { - ContainerId containerId = ConverterUtils.toContainerId(envs + ContainerId containerId = ContainerId.fromString(envs .get(Environment.CONTAINER_ID.name())); appAttemptID = containerId.getApplicationAttemptId(); } @@ -1048,8 +1048,7 @@ public void run() { URL yarnUrl = null; try { - yarnUrl = ConverterUtils.getYarnUrlFromURI( - new URI(renamedScriptPath.toString())); + yarnUrl = URL.fromURI(new URI(renamedScriptPath.toString())); } catch (URISyntaxException e) { LOG.error("Error when trying to use shell script path specified" + " in env, path=" + renamedScriptPath, e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java index 5adc37d825c..9879b1e9ddf 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java @@ -68,6 +68,7 @@ import 
org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain; @@ -857,7 +858,7 @@ private void addToLocalResources(FileSystem fs, String fileSrcPath, FileStatus scFileStatus = fs.getFileStatus(dst); LocalResource scRsrc = LocalResource.newInstance( - ConverterUtils.getYarnUrlFromURI(dst.toUri()), + URL.fromURI(dst.toUri()), LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime()); localResources.put(fileDstPath, scRsrc); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java index 55fbd60b188..119fa6f3bd5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/DistributedShellTimelinePlugin.java @@ -53,7 +53,7 @@ public Set getTimelineEntityGroupId(String entityType, public Set getTimelineEntityGroupId(String entityId, String entityType) { if (ApplicationMaster.DSEntity.DS_CONTAINER.toString().equals(entityId)) { - ContainerId containerId = ConverterUtils.toContainerId(entityId); + ContainerId containerId = ContainerId.fromString(entityId); ApplicationId appId = containerId.getApplicationAttemptId() .getApplicationId(); return toEntityGroupId(appId.toString()); @@ -69,7 +69,7 @@ public Set getTimelineEntityGroupId(String entityType, } private Set toEntityGroupId(String strAppId) { - ApplicationId appId = ConverterUtils.toApplicationId(strAppId); + ApplicationId appId = ApplicationId.fromString(strAppId); TimelineEntityGroupId groupId = TimelineEntityGroupId.newInstance( appId, ApplicationMaster.CONTAINER_ENTITY_GROUP_ID); Set result = new HashSet<>(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java index 2b46fca4b45..9448cf14bc3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java @@ -371,8 +371,8 @@ public void run() { } String currAttemptEntityId = entitiesAttempts.getEntities().get(0).getEntityId(); - ApplicationAttemptId 
attemptId - = ConverterUtils.toApplicationAttemptId(currAttemptEntityId); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString( + currAttemptEntityId); NameValuePair primaryFilter = new NameValuePair( ApplicationMaster.APPID_TIMELINE_FILTER_NAME, attemptId.getApplicationId().toString()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java index d9e9fa6f943..865ce005fcb 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java @@ -301,7 +301,7 @@ public int run(String[] args) throws Exception { */ private void signalToContainer(String containerIdStr, SignalContainerCommand command) throws YarnException, IOException { - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); sysout.println("Signalling container " + containerIdStr); client.signalToContainer(containerId, command); } @@ -327,8 +327,8 @@ private int printApplicationAttemptReport(String applicationAttemptId) throws YarnException, IOException { ApplicationAttemptReport appAttemptReport = null; try { - appAttemptReport = client.getApplicationAttemptReport(ConverterUtils - .toApplicationAttemptId(applicationAttemptId)); + appAttemptReport = client.getApplicationAttemptReport( + ApplicationAttemptId.fromString(applicationAttemptId)); } catch (ApplicationNotFoundException e) { sysout.println("Application for AppAttempt with id '" + applicationAttemptId + "' doesn't exist in RM or Timeline Server."); @@ -384,8 +384,7 @@ private int printContainerReport(String containerId) throws YarnException, IOException { ContainerReport containerReport = null; try { - containerReport = client.getContainerReport((ConverterUtils - .toContainerId(containerId))); + containerReport = client.getContainerReport(ContainerId.fromString(containerId)); } catch (ApplicationNotFoundException e) { sysout.println("Application for Container with id '" + containerId + "' doesn't exist in RM or Timeline Server."); @@ -515,7 +514,7 @@ private int killApplication(String[] applicationIds) throws YarnException, */ private void killApplication(String applicationId) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); ApplicationReport appReport = null; try { appReport = client.getApplicationReport(appId); @@ -540,7 +539,7 @@ private void killApplication(String applicationId) throws YarnException, */ private void moveApplicationAcrossQueues(String applicationId, String queue) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); ApplicationReport appReport = client.getApplicationReport(appId); if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED || appReport.getYarnApplicationState() == YarnApplicationState.KILLED @@ -565,7 +564,7 @@ private void failApplicationAttempt(String attemptId) throws YarnException, IOException { ApplicationId appId; ApplicationAttemptId attId; - attId = ConverterUtils.toApplicationAttemptId(attemptId); + 
attId = ApplicationAttemptId.fromString(attemptId); appId = attId.getApplicationId(); sysout.println("Failing attempt " + attId + " of application " + appId); @@ -583,8 +582,8 @@ private int printApplicationReport(String applicationId) throws YarnException, IOException { ApplicationReport appReport = null; try { - appReport = client.getApplicationReport(ConverterUtils - .toApplicationId(applicationId)); + appReport = client.getApplicationReport( + ApplicationId.fromString(applicationId)); } catch (ApplicationNotFoundException e) { sysout.println("Application with id '" + applicationId + "' doesn't exist in RM or Timeline Server."); @@ -684,7 +683,7 @@ private void listApplicationAttempts(String applicationId) throws YarnException, new OutputStreamWriter(sysout, Charset.forName("UTF-8"))); List appAttemptsReport = client - .getApplicationAttempts(ConverterUtils.toApplicationId(applicationId)); + .getApplicationAttempts(ApplicationId.fromString(applicationId)); writer.println("Total number of application attempts " + ":" + appAttemptsReport.size()); writer.printf(APPLICATION_ATTEMPTS_PATTERN, "ApplicationAttempt-Id", @@ -711,8 +710,8 @@ private void listContainers(String appAttemptId) throws YarnException, PrintWriter writer = new PrintWriter( new OutputStreamWriter(sysout, Charset.forName("UTF-8"))); - List appsReport = client - .getContainers(ConverterUtils.toApplicationAttemptId(appAttemptId)); + List appsReport = client.getContainers( + ApplicationAttemptId.fromString(appAttemptId)); writer.println("Total number of containers " + ":" + appsReport.size()); writer.printf(CONTAINER_PATTERN, "Container-Id", "Start Time", "Finish Time", "State", "Host", "Node Http Address", "LOG-URL"); @@ -735,7 +734,7 @@ private void listContainers(String appAttemptId) throws YarnException, */ private void updateApplicationPriority(String applicationId, String priority) throws YarnException, IOException { - ApplicationId appId = ConverterUtils.toApplicationId(applicationId); + ApplicationId appId = ApplicationId.fromString(applicationId); Priority newAppPriority = Priority.newInstance(Integer.parseInt(priority)); sysout.println("Updating priority of an application " + applicationId); Priority updateApplicationPriority = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java index d62ee5ee40e..4fdb57b6bca 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java @@ -153,7 +153,7 @@ public int run(String[] args) throws Exception { ApplicationId appId = null; try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (Exception e) { System.err.println("Invalid ApplicationId specified"); return -1; @@ -456,8 +456,8 @@ public ContainerReport getContainerReport(String containerIdStr) throws YarnException, IOException { YarnClient yarnClient = createYarnClient(); try { - return yarnClient.getContainerReport(ConverterUtils - .toContainerId(containerIdStr)); + return yarnClient.getContainerReport( + ContainerId.fromString(containerIdStr)); } finally { yarnClient.close(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java 
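// Illustrative sketch, not part of the patch: the pattern the CLI changes above
// converge on -- user-supplied id strings are parsed with the records' own
// fromString(...) factories instead of ConverterUtils. Class name and id values
// here are hypothetical.
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;

public class IdParsingSketch {
  public static void main(String[] args) {
    ApplicationId appId = ApplicationId.fromString("application_1423221031460_0003");
    ApplicationAttemptId attemptId =
        ApplicationAttemptId.fromString("appattempt_1423221031460_0003_000001");
    ContainerId containerId =
        ContainerId.fromString("container_e03_1423221031460_0003_01_000002");
    // Parent ids remain reachable from the child, which the CLIs rely on:
    System.out.println(containerId.getApplicationAttemptId().getApplicationId()
        .equals(appId));  // true
    try {
      ApplicationId.fromString("application_1423221031460");  // missing sequence number
    } catch (IllegalArgumentException e) {
      // Malformed ids still fail with IllegalArgumentException, so the CLIs'
      // "Invalid ApplicationId specified" handling keeps working unchanged.
      System.err.println(e.getMessage());
    }
  }
}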
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java index a89551f9c51..f51fee929cf 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java @@ -243,7 +243,7 @@ private void listDetailedClusterNodes(Set nodeStates) */ private void printNodeStatus(String nodeIdStr) throws YarnException, IOException { - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); List nodesReport = client.getNodeReports(); // Use PrintWriter.println, which uses correct platform line ending. ByteArrayOutputStream baos = new ByteArrayOutputStream(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java index d407c206f5b..aa7fc30344b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java @@ -427,7 +427,7 @@ private int updateNodeResource(String nodeIdStr, int memSize, ResourceManagerAdministrationProtocol adminProtocol = createAdminProtocol(); UpdateNodeResourceRequest request = recordFactory.newRecordInstance(UpdateNodeResourceRequest.class); - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); Resource resource = Resources.createResource(memSize, cores); Map resourceMap = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java index 057594d5bb2..15513338391 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java @@ -222,7 +222,7 @@ public void testUpdateNodeResource() throws Exception { verify(admin).updateNodeResource(argument.capture()); UpdateNodeResourceRequest request = argument.getValue(); Map resourceMap = request.getNodeResourceMap(); - NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); + NodeId nodeId = NodeId.fromString(nodeIdStr); Resource expectedResource = Resources.createResource(memSize, cores); ResourceOption resource = resourceMap.get(nodeId); assertNotNull("resource for " + nodeIdStr + " shouldn't be null.", diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java index 4c1d152ccd5..a80f9d7629e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogDeletionService.java @@ -99,7 +99,7 @@ private static void deleteOldLogDirsFrom(Path dir, long 
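// Illustrative sketch, not part of the patch: NodeId parsing as used by NodeCLI and
// RMAdminCLI above. NodeId.fromString expects "host:port"; only the @Private
// toNodeIdWithDefaultPort helper stays in ConverterUtils for strings without a port.
// Hostname and port below are made up.
import org.apache.hadoop.yarn.api.records.NodeId;

public class NodeIdSketch {
  public static void main(String[] args) {
    NodeId nodeId = NodeId.fromString("worker01.example.com:45454");
    System.out.println(nodeId.getHost());  // worker01.example.com
    System.out.println(nodeId.getPort());  // 45454
    try {
      NodeId.fromString("worker01.example.com");  // no port
    } catch (IllegalArgumentException e) {
      System.err.println(e.getMessage());  // expects host:port
    }
  }
}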
cutoffMillis, if(appDir.isDirectory() && appDir.getModificationTime() < cutoffMillis) { boolean appTerminated = - isApplicationTerminated(ConverterUtils.toApplicationId(appDir + isApplicationTerminated(ApplicationId.fromString(appDir .getPath().getName()), rmClient); if(appTerminated && shouldDeleteLogDir(appDir, cutoffMillis, fs)) { try { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java index 98ffce16f3f..8b213d5cc13 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java @@ -209,14 +209,11 @@ public LogValue(List rootLogDirs, ContainerId containerId, public Set getPendingLogFilesToUploadForThisContainer() { Set pendingUploadFiles = new HashSet(); for (String rootLogDir : this.rootLogDirs) { - File appLogDir = - new File(rootLogDir, - ConverterUtils.toString( - this.containerId.getApplicationAttemptId(). - getApplicationId()) - ); + File appLogDir = new File(rootLogDir, + this.containerId.getApplicationAttemptId(). + getApplicationId().toString()); File containerLogDir = - new File(appLogDir, ConverterUtils.toString(this.containerId)); + new File(appLogDir, this.containerId.toString()); if (!containerLogDir.isDirectory()) { continue; // ContainerDir may have been deleted by the user. diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java index 3811054a452..26b2b01abcc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java @@ -59,7 +59,7 @@ public class LogCLIHelpers implements Configurable { public int dumpAContainersLogs(String appId, String containerId, String nodeId, String jobOwner) throws IOException { ContainerLogsRequest options = new ContainerLogsRequest(); - options.setAppId(ConverterUtils.toApplicationId(appId)); + options.setAppId(ApplicationId.fromString(appId)); options.setContainerId(containerId); options.setNodeId(nodeId); options.setAppOwner(jobOwner); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java index acd29fb02d6..67bc2b74fd1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java @@ -18,18 +18,13 @@ package org.apache.hadoop.yarn.util; -import static org.apache.hadoop.yarn.util.StringHelper._split; - import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.Map.Entry; -import java.util.NoSuchElementException; import 
org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.SecurityUtil; @@ -41,7 +36,6 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; /** @@ -49,7 +43,7 @@ * from/to 'serializableFormat' to/from hadoop/nativejava data structures. * */ -@Private +@Public public class ConverterUtils { public static final String APPLICATION_PREFIX = "application"; @@ -58,174 +52,114 @@ public class ConverterUtils { /** * return a hadoop path from a given url + * This method is deprecated, use {@link URL#toPath()} instead. * * @param url * url to convert * @return path from {@link URL} * @throws URISyntaxException */ + @Public + @Deprecated public static Path getPathFromYarnURL(URL url) throws URISyntaxException { - String scheme = url.getScheme() == null ? "" : url.getScheme(); - - String authority = ""; - if (url.getHost() != null) { - authority = url.getHost(); - if (url.getUserInfo() != null) { - authority = url.getUserInfo() + "@" + authority; - } - if (url.getPort() > 0) { - authority += ":" + url.getPort(); - } - } - - return new Path( - (new URI(scheme, authority, url.getFile(), null, null)).normalize()); + return url.toPath(); } - - /** - * change from CharSequence to string for map key and value - * @param env map for converting - * @return string,string map + + /* + * This method is deprecated, use {@link URL#fromPath(Path)} instead. */ - public static Map convertToString( - Map env) { - - Map stringMap = new HashMap(); - for (Entry entry: env.entrySet()) { - stringMap.put(entry.getKey().toString(), entry.getValue().toString()); - } - return stringMap; - } - + @Public + @Deprecated public static URL getYarnUrlFromPath(Path path) { - return getYarnUrlFromURI(path.toUri()); + return URL.fromPath(path); } + /* + * This method is deprecated, use {@link URL#fromURI(URI)} instead. + */ + @Public + @Deprecated public static URL getYarnUrlFromURI(URI uri) { - URL url = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(URL.class); - if (uri.getHost() != null) { - url.setHost(uri.getHost()); - } - if (uri.getUserInfo() != null) { - url.setUserInfo(uri.getUserInfo()); - } - url.setPort(uri.getPort()); - url.setScheme(uri.getScheme()); - url.setFile(uri.getPath()); - return url; + return URL.fromURI(uri); } + /* + * This method is deprecated, use {@link ApplicationId#toString()} instead. + */ + @Public + @Deprecated public static String toString(ApplicationId appId) { return appId.toString(); } + /* + * This method is deprecated, use {@link ApplicationId#fromString(String)} + * instead. + */ + @Public + @Deprecated public static ApplicationId toApplicationId(RecordFactory recordFactory, - String appIdStr) { - Iterator it = _split(appIdStr).iterator(); - if (!it.next().equals(APPLICATION_PREFIX)) { - throw new IllegalArgumentException("Invalid ApplicationId prefix: " - + appIdStr + ". 
The valid ApplicationId should start with prefix " - + APPLICATION_PREFIX); - } - try { - return toApplicationId(recordFactory, it); - } catch (NumberFormatException n) { - throw new IllegalArgumentException("Invalid ApplicationId: " + appIdStr, - n); - } catch (NoSuchElementException e) { - throw new IllegalArgumentException("Invalid ApplicationId: " + appIdStr, - e); - } - } - - private static ApplicationId toApplicationId(RecordFactory recordFactory, - Iterator it) { - ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), - Integer.parseInt(it.next())); - return appId; - } - - private static ApplicationAttemptId toApplicationAttemptId( - Iterator it) throws NumberFormatException { - ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), - Integer.parseInt(it.next())); - ApplicationAttemptId appAttemptId = - ApplicationAttemptId.newInstance(appId, Integer.parseInt(it.next())); - return appAttemptId; - } - - private static ApplicationId toApplicationId( - Iterator it) throws NumberFormatException { - ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()), - Integer.parseInt(it.next())); - return appId; + String applicationIdStr) { + return ApplicationId.fromString(applicationIdStr); } + /* + * This method is deprecated, use {@link ContainerId#toString()} instead. + */ + @Public + @Deprecated public static String toString(ContainerId cId) { return cId == null ? null : cId.toString(); } - + + @Private + @InterfaceStability.Unstable public static NodeId toNodeIdWithDefaultPort(String nodeIdStr) { if (nodeIdStr.indexOf(":") < 0) { - return toNodeId(nodeIdStr + ":0"); + return NodeId.fromString(nodeIdStr + ":0"); } - return toNodeId(nodeIdStr); + return NodeId.fromString(nodeIdStr); } + /* + * This method is deprecated, use {@link NodeId#fromString(String)} instead. + */ + @Public + @Deprecated public static NodeId toNodeId(String nodeIdStr) { - String[] parts = nodeIdStr.split(":"); - if (parts.length != 2) { - throw new IllegalArgumentException("Invalid NodeId [" + nodeIdStr - + "]. Expected host:port"); - } - try { - NodeId nodeId = - NodeId.newInstance(parts[0].trim(), Integer.parseInt(parts[1])); - return nodeId; - } catch (NumberFormatException e) { - throw new IllegalArgumentException("Invalid port: " + parts[1], e); - } + return NodeId.fromString(nodeIdStr); } + /* + * This method is deprecated, use {@link ContainerId#fromString(String)} + * instead. + */ + @Public + @Deprecated public static ContainerId toContainerId(String containerIdStr) { return ContainerId.fromString(containerIdStr); } - + + /* + * This method is deprecated, use {@link ApplicationAttemptId#toString()} + * instead. + */ + @Public + @Deprecated public static ApplicationAttemptId toApplicationAttemptId( - String applicationAttmeptIdStr) { - Iterator it = _split(applicationAttmeptIdStr).iterator(); - if (!it.next().equals(APPLICATION_ATTEMPT_PREFIX)) { - throw new IllegalArgumentException("Invalid AppAttemptId prefix: " - + applicationAttmeptIdStr); - } - try { - return toApplicationAttemptId(it); - } catch (NumberFormatException n) { - throw new IllegalArgumentException("Invalid AppAttemptId: " - + applicationAttmeptIdStr, n); - } catch (NoSuchElementException e) { - throw new IllegalArgumentException("Invalid AppAttemptId: " - + applicationAttmeptIdStr, e); - } + String applicationAttemptIdStr) { + return ApplicationAttemptId.fromString(applicationAttemptIdStr); } + /* + * This method is deprecated, use {@link ApplicationId#fromString(String)} + * instead. 
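// Illustrative sketch, not part of the patch: the deprecated ConverterUtils entry
// points above now only delegate to the record classes, so existing callers keep
// working but emit deprecation warnings until they migrate. The id value is made up.
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.ConverterUtils;

public class MigrationSketch {
  @SuppressWarnings("deprecation")  // the legacy call is kept only for comparison
  public static void main(String[] args) {
    String idStr = "container_1423221031460_0003_01_000001";
    ContainerId viaRecord = ContainerId.fromString(idStr);       // preferred
    ContainerId viaUtils = ConverterUtils.toContainerId(idStr);  // deprecated delegate
    System.out.println(viaRecord.equals(viaUtils));  // true
  }
}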
+ */ + @Public + @Deprecated public static ApplicationId toApplicationId( String appIdStr) { - Iterator it = _split(appIdStr).iterator(); - if (!it.next().equals(APPLICATION_PREFIX)) { - throw new IllegalArgumentException("Invalid ApplicationId prefix: " - + appIdStr + ". The valid ApplicationId should start with prefix " - + APPLICATION_PREFIX); - } - try { - return toApplicationId(it); - } catch (NumberFormatException n) { - throw new IllegalArgumentException("Invalid ApplicationId: " - + appIdStr, n); - } catch (NoSuchElementException e) { - throw new IllegalArgumentException("Invalid ApplicationId: " - + appIdStr, e); - } + return ApplicationId.fromString(appIdStr); } /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java index bd9c907418e..de18dc63d5a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java @@ -346,7 +346,7 @@ private long unpack(File localrsrc, File dst) throws IOException { public Path call() throws Exception { final Path sCopy; try { - sCopy = ConverterUtils.getPathFromYarnURL(resource.getResource()); + sCopy = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException("Invalid resource", e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java index 2fc8dfcc9c2..1da6e232ea3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java @@ -290,7 +290,7 @@ private ContainerId verifyAndGetContainerId(Block html) { } ContainerId containerId = null; try { - containerId = ConverterUtils.toContainerId(containerIdStr); + containerId = ContainerId.fromString(containerIdStr); } catch (IllegalArgumentException e) { html.h1() ._("Cannot get container logs for invalid containerId: " @@ -308,7 +308,7 @@ private NodeId verifyAndGetNodeId(Block html) { } NodeId nodeId = null; try { - nodeId = ConverterUtils.toNodeId(nodeIdStr); + nodeId = NodeId.fromString(nodeIdStr); } catch (IllegalArgumentException e) { html.h1()._("Cannot get container logs. 
Invalid nodeId: " + nodeIdStr) ._(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java index 3aa773aea6b..624554173cd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/util/WebAppUtils.java @@ -392,7 +392,7 @@ public static ApplicationId parseApplicationId(RecordFactory recordFactory, } ApplicationId aid = null; try { - aid = ConverterUtils.toApplicationId(recordFactory, appId); + aid = ApplicationId.fromString(appId); } catch (Exception e) { throw new BadRequestException(e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java index 3cec38b060c..077558b96a6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestConverterUtils.java @@ -34,55 +34,56 @@ public class TestConverterUtils { @Test public void testConvertUrlWithNoPort() throws URISyntaxException { Path expectedPath = new Path("hdfs://foo.com"); - URL url = ConverterUtils.getYarnUrlFromPath(expectedPath); - Path actualPath = ConverterUtils.getPathFromYarnURL(url); + URL url = URL.fromPath(expectedPath); + Path actualPath = url.toPath(); assertEquals(expectedPath, actualPath); } @Test public void testConvertUrlWithUserinfo() throws URISyntaxException { Path expectedPath = new Path("foo://username:password@example.com:8042"); - URL url = ConverterUtils.getYarnUrlFromPath(expectedPath); - Path actualPath = ConverterUtils.getPathFromYarnURL(url); + URL url = URL.fromPath(expectedPath); + Path actualPath = url.toPath(); assertEquals(expectedPath, actualPath); } @Test public void testContainerId() throws URISyntaxException { ContainerId id = TestContainerId.newContainerId(0, 0, 0, 0); - String cid = ConverterUtils.toString(id); + String cid = id.toString(); assertEquals("container_0_0000_00_000000", cid); - ContainerId gen = ConverterUtils.toContainerId(cid); + ContainerId gen = ContainerId.fromString(cid); assertEquals(gen, id); } @Test public void testContainerIdWithEpoch() throws URISyntaxException { ContainerId id = TestContainerId.newContainerId(0, 0, 0, 25645811); - String cid = ConverterUtils.toString(id); + String cid = id.toString(); assertEquals("container_0_0000_00_25645811", cid); - ContainerId gen = ConverterUtils.toContainerId(cid); + ContainerId gen = ContainerId.fromString(cid); assertEquals(gen.toString(), id.toString()); long ts = System.currentTimeMillis(); ContainerId id2 = TestContainerId.newContainerId(36473, 4365472, ts, 4298334883325L); - String cid2 = ConverterUtils.toString(id2); + String cid2 = id2.toString(); assertEquals( "container_e03_" + ts + "_36473_4365472_999799999997", cid2); - ContainerId gen2 = ConverterUtils.toContainerId(cid2); + ContainerId gen2 = ContainerId.fromString(cid2); assertEquals(gen2.toString(), id2.toString()); ContainerId id3 = TestContainerId.newContainerId(36473, 4365472, ts, 844424930131965L); - String cid3 = ConverterUtils.toString(id3); + 
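// Illustrative sketch, not part of the patch: the URL <-> Path conversion that
// FSDownload and the tests above now perform through the URL record itself.
// toPath() declares URISyntaxException; the HDFS path is made up.
import java.net.URISyntaxException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.URL;

public class UrlPathRoundTripSketch {
  public static void main(String[] args) throws URISyntaxException {
    Path original = new Path("hdfs://namenode.example.com:8020/user/test/job.jar");
    URL yarnUrl = URL.fromPath(original);  // was ConverterUtils.getYarnUrlFromPath
    Path roundTripped = yarnUrl.toPath();  // was ConverterUtils.getPathFromYarnURL
    // Expected to print true, mirroring the round-trip assertions in the tests above.
    System.out.println(original.equals(roundTripped));
  }
}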
String cid3 = id3.toString(); assertEquals( "container_e767_" + ts + "_36473_4365472_1099511627773", cid3); - ContainerId gen3 = ConverterUtils.toContainerId(cid3); + ContainerId gen3 = ContainerId.fromString(cid3); assertEquals(gen3.toString(), id3.toString()); } @Test + @SuppressWarnings("deprecation") public void testContainerIdNull() throws URISyntaxException { assertNull(ConverterUtils.toString((ContainerId)null)); } @@ -101,16 +102,19 @@ public void testNodeIdWithDefaultPort() throws URISyntaxException { } @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("deprecation") public void testInvalidContainerId() { - ConverterUtils.toContainerId("container_e20_1423221031460_0003_01"); + ContainerId.fromString("container_e20_1423221031460_0003_01"); } @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("deprecation") public void testInvalidAppattemptId() { ConverterUtils.toApplicationAttemptId("appattempt_1423221031460"); } @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("deprecation") public void testApplicationId() { ConverterUtils.toApplicationId("application_1423221031460"); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java index 376b27d4bf6..877dd080afb 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestFSDownload.java @@ -53,6 +53,7 @@ import java.util.zip.ZipOutputStream; import org.apache.hadoop.util.concurrent.HadoopExecutors; +import org.apache.hadoop.yarn.api.records.URL; import org.junit.Assert; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; @@ -103,7 +104,7 @@ static LocalResource createFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis) throws IOException { createFile(files, p, len, r); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(p)); + ret.setResource(URL.fromPath(p)); ret.setSize(len); ret.setType(LocalResourceType.FILE); ret.setVisibility(vis); @@ -134,7 +135,7 @@ static LocalResource createJar(FileContext files, Path p, LOG.info("Done writing jar stream "); out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(p)); + ret.setResource(URL.fromPath(p)); FileStatus status = files.getFileStatus(p); ret.setSize(status.getLen()); ret.setTimestamp(status.getModificationTime()); @@ -162,7 +163,7 @@ static LocalResource createTarFile(FileContext files, Path p, int len, out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".tar"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); @@ -190,7 +191,7 @@ static LocalResource createTgzFile(FileContext files, Path p, int len, out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".tar.gz"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); @@ -216,7 +217,7 @@ static 
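// Illustrative sketch, not part of the patch: registering a file as a LocalResource
// now goes through URL.fromPath, as in the TestFSDownload helpers above and the
// distributed-shell client earlier in the patch. Method name, length and timestamp
// arguments here are hypothetical.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.URL;

public class LocalResourceSketch {
  static LocalResource forFile(Path fileOnHdfs, long len, long modTime) {
    return LocalResource.newInstance(
        URL.fromPath(fileOnHdfs),  // was ConverterUtils.getYarnUrlFromPath(fileOnHdfs)
        LocalResourceType.FILE,
        LocalResourceVisibility.APPLICATION,
        len, modTime);
  }
}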
LocalResource createJarFile(FileContext files, Path p, int len, out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".jar"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); @@ -242,7 +243,7 @@ static LocalResource createZipFile(FileContext files, Path p, int len, out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path(p.toString() + ret.setResource(URL.fromPath(new Path(p.toString() + ".ZIP"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java index aedf6f656b6..84d45439445 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerOnTimelineStore.java @@ -278,7 +278,7 @@ private static ApplicationReportExt convertToApplicationReport( } if (field == ApplicationReportField.USER_AND_ACLS) { return new ApplicationReportExt(ApplicationReport.newInstance( - ConverterUtils.toApplicationId(entity.getEntityId()), + ApplicationId.fromString(entity.getEntityId()), latestApplicationAttemptId, user, queue, name, null, -1, null, state, diagnosticsInfo, null, createdTime, finishedTime, finalStatus, null, null, progress, type, null, appTags, @@ -394,13 +394,10 @@ private static ApplicationReportExt convertToApplicationReport( } if (eventInfo .containsKey(ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO)) { - latestApplicationAttemptId = - ConverterUtils - .toApplicationAttemptId( - eventInfo - .get( - ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO) - .toString()); + latestApplicationAttemptId = ApplicationAttemptId.fromString( + eventInfo.get( + ApplicationMetricsConstants.LATEST_APP_ATTEMPT_EVENT_INFO) + .toString()); } if (eventInfo .containsKey(ApplicationMetricsConstants.DIAGNOSTICS_INFO_EVENT_INFO)) { @@ -426,7 +423,7 @@ private static ApplicationReportExt convertToApplicationReport( } } return new ApplicationReportExt(ApplicationReport.newInstance( - ConverterUtils.toApplicationId(entity.getEntityId()), + ApplicationId.fromString(entity.getEntityId()), latestApplicationAttemptId, user, queue, name, null, -1, null, state, diagnosticsInfo, null, createdTime, finishedTime, finalStatus, appResources, null, progress, type, null, appTags, unmanagedApplication, @@ -471,7 +468,7 @@ private static ApplicationAttemptReport convertToApplicationAttemptReport( if (eventInfo .containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO)) { amContainerId = - ConverterUtils.toContainerId(eventInfo.get( + ContainerId.fromString(eventInfo.get( AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO) .toString()); } @@ -513,7 +510,7 
@@ private static ApplicationAttemptReport convertToApplicationAttemptReport( if (eventInfo .containsKey(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO)) { amContainerId = - ConverterUtils.toContainerId(eventInfo.get( + ContainerId.fromString(eventInfo.get( AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO) .toString()); } @@ -521,7 +518,7 @@ private static ApplicationAttemptReport convertToApplicationAttemptReport( } } return ApplicationAttemptReport.newInstance( - ConverterUtils.toApplicationAttemptId(entity.getEntityId()), + ApplicationAttemptId.fromString(entity.getEntityId()), host, rpcPort, trackingUrl, originalTrackingUrl, diagnosticsInfo, state, amContainerId); } @@ -610,7 +607,7 @@ private static ContainerReport convertToContainerReport( } } ContainerId containerId = - ConverterUtils.toContainerId(entity.getEntityId()); + ContainerId.fromString(entity.getEntityId()); String logUrl = null; NodeId allocatedNode = null; if (allocatedHost != null) { @@ -623,7 +620,7 @@ private static ContainerReport convertToContainerReport( user); } return ContainerReport.newInstance( - ConverterUtils.toContainerId(entity.getEntityId()), + ContainerId.fromString(entity.getEntityId()), Resource.newInstance(allocatedMem, allocatedVcore), allocatedNode, Priority.newInstance(allocatedPriority), createdTime, finishedTime, diagnosticsInfo, logUrl, exitStatus, state, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java index c340b190252..295b8ab6351 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java @@ -204,7 +204,7 @@ public Map getAllApplications() FileStatus[] files = fs.listStatus(rootDirPath); for (FileStatus file : files) { ApplicationId appId = - ConverterUtils.toApplicationId(file.getPath().getName()); + ApplicationId.fromString(file.getPath().getName()); try { ApplicationHistoryData historyData = getApplication(appId); if (historyData != null) { @@ -231,8 +231,8 @@ public Map getAllApplications() HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.startsWith( ConverterUtils.APPLICATION_ATTEMPT_PREFIX)) { - ApplicationAttemptId appAttemptId = - ConverterUtils.toApplicationAttemptId(entry.key.id); + ApplicationAttemptId appAttemptId = ApplicationAttemptId.fromString( + entry.key.id); if (appAttemptId.getApplicationId().equals(appId)) { ApplicationAttemptHistoryData historyData = historyDataMap.get(appAttemptId); @@ -385,7 +385,7 @@ public Map getContainers( HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.startsWith(ConverterUtils.CONTAINER_PREFIX)) { ContainerId containerId = - ConverterUtils.toContainerId(entry.key.id); + ContainerId.fromString(entry.key.id); if (containerId.getApplicationAttemptId().equals(appAttemptId)) { ContainerHistoryData historyData = historyDataMap.get(containerId); diff --git 
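// Illustrative sketch, not part of the patch: the history stores above still key
// their entries by the canonical id strings and still check the ConverterUtils
// prefix constants; only the string-to-record parsing moves to fromString.
// The dispatching method is hypothetical.
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.ConverterUtils;

public class HistoryKeySketch {
  static Object parseHistoryKey(String id) {
    if (id.startsWith(ConverterUtils.APPLICATION_ATTEMPT_PREFIX)) {
      return ApplicationAttemptId.fromString(id);  // was ConverterUtils.toApplicationAttemptId
    } else if (id.startsWith(ConverterUtils.CONTAINER_PREFIX)) {
      return ContainerId.fromString(id);           // was ConverterUtils.toContainerId
    }
    throw new IllegalArgumentException("Unrecognized history key: " + id);
  }
}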
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java index 22e45fa3d37..134f22de331 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java @@ -113,7 +113,7 @@ public static LocalResource newLocalResource(URL url, LocalResourceType type, public static LocalResource newLocalResource(URI uri, LocalResourceType type, LocalResourceVisibility visibility, long size, long timestamp, boolean shouldBeUploadedToSharedCache) { - return newLocalResource(ConverterUtils.getYarnUrlFromURI(uri), type, + return newLocalResource(URL.fromURI(uri), type, visibility, size, timestamp, shouldBeUploadedToSharedCache); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java index 9c2a1ae04de..798c3726739 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/AppAttemptBlock.java @@ -65,7 +65,7 @@ protected void render(Block html) { } try { - appAttemptId = ConverterUtils.toApplicationAttemptId(attemptid); + appAttemptId = ApplicationAttemptId.fromString(attemptid); } catch (IllegalArgumentException e) { puts("Invalid application attempt ID: " + attemptid); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java index cae8d2e6fb5..893e82384f2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/ContainerBlock.java @@ -59,7 +59,7 @@ protected void render(Block html) { ContainerId containerId = null; try { - containerId = ConverterUtils.toContainerId(containerid); + containerId = ContainerId.fromString(containerid); } catch (IllegalArgumentException e) { puts("Invalid container ID: " + containerid); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java index 19ea30136e8..904c5118f44 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java @@ -431,7 +431,7 @@ protected static ApplicationId parseApplicationId(String appId) { } ApplicationId aid = null; try { - aid = ConverterUtils.toApplicationId(appId); + aid = ApplicationId.fromString(appId); } catch (Exception e) { throw new BadRequestException(e); } @@ -449,7 +449,7 @@ protected static ApplicationAttemptId parseApplicationAttemptId( } ApplicationAttemptId aaid = null; try { - aaid = ConverterUtils.toApplicationAttemptId(appAttemptId); + aaid = ApplicationAttemptId.fromString(appAttemptId); } catch (Exception e) { throw new BadRequestException(e); } @@ -466,7 +466,7 @@ protected static ContainerId parseContainerId(String containerId) { } ContainerId cid = null; try { - cid = ConverterUtils.toContainerId(containerId); + cid = ContainerId.fromString(containerId); } catch (Exception e) { throw new BadRequestException(e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java index 8f4b1221104..f8f19bdadce 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java @@ -167,11 +167,10 @@ public int launchContainer(ContainerStartContext ctx) throws IOException { ContainerId containerId = container.getContainerId(); // create container dirs on all disks - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); String appIdStr = - ConverterUtils.toString( containerId.getApplicationAttemptId(). 
- getApplicationId()); + getApplicationId().toString(); for (String sLocalDir : localDirs) { Path usersdir = new Path(sLocalDir, ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, user); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java index 72da2365f5a..2b184694364 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java @@ -195,9 +195,9 @@ public int launchContainer(ContainerStartContext ctx) throws IOException { ContainerId containerId = container.getContainerId(); // create container dirs on all disks - String containerIdStr = ConverterUtils.toString(containerId); - String appIdStr = ConverterUtils.toString( - containerId.getApplicationAttemptId().getApplicationId()); + String containerIdStr = containerId.toString(); + String appIdStr = + containerId.getApplicationAttemptId().getApplicationId().toString(); for (String sLocalDir : localDirs) { Path usersdir = new Path(sLocalDir, ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, userName); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java index e46ce569ee9..1072b5a415f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java @@ -319,7 +319,7 @@ public int launchContainer(ContainerStartContext ctx) throws IOException { String runAsUser = getRunAsUser(user); ContainerId containerId = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); resourcesHandler.preExecute(containerId, container.getResource()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java index 304488eab95..7a6e1cfa206 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/container/ContainerImpl.java @@ -1361,7 +1361,7 @@ public void handle(ContainerEvent event) { public 
String toString() { this.readLock.lock(); try { - return ConverterUtils.toString(this.containerId); + return this.containerId.toString(); } finally { this.readLock.unlock(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java index a3b53e35e46..7e9030cce23 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java @@ -163,7 +163,7 @@ public Integer call() { final ContainerLaunchContext launchContext = container.getLaunchContext(); ContainerId containerID = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerID); + String containerIdStr = containerID.toString(); final List command = launchContext.getCommands(); int ret = -1; @@ -326,7 +326,7 @@ protected boolean validateContainerState() { protected List getContainerLogDirs(List logDirs) { List containerLogDirs = new ArrayList<>(logDirs.size()); String appIdStr = app.getAppId().toString(); - String containerIdStr = ConverterUtils.toString(container.getContainerId()); + String containerIdStr = container.getContainerId().toString(); String relativeContainerLogDir = ContainerLaunch .getRelativeContainerLogDir(appIdStr, containerIdStr); @@ -520,7 +520,7 @@ protected String getPidFileSubpath(String appIdStr, String containerIdStr) { @SuppressWarnings("unchecked") // dispatcher not typed public void cleanupContainer() throws IOException { ContainerId containerId = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); LOG.info("Cleaning up container " + containerIdStr); try { @@ -616,7 +616,7 @@ public void signalContainer(SignalContainerCommand command) throws IOException { ContainerId containerId = container.getContainerTokenIdentifier().getContainerID(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); String user = container.getUser(); Signal signal = translateCommandToSignal(command); if (signal.equals(Signal.NULL)) { @@ -708,7 +708,7 @@ public static Signal translateCommandToSignal( */ private String getContainerPid(Path pidFilePath) throws Exception { String containerIdStr = - ConverterUtils.toString(container.getContainerId()); + container.getContainerId().toString(); String processId = null; LOG.debug("Accessing pid for container " + containerIdStr + " from pid file " + pidFilePath); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerRelaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerRelaunch.java index 711d5cdc261..1292df6d312 100644 --- 
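// Illustrative sketch, not part of the patch: the executors and ContainerLaunch above
// derive their directory name components straight from the records' toString() forms.
// The usercache/appcache layout shown is the NM's local-dir convention; the helper
// itself is hypothetical.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ContainerId;

public class ContainerDirSketch {
  static Path containerWorkDir(String localDir, String user, ContainerId containerId) {
    String containerIdStr = containerId.toString();  // was ConverterUtils.toString(containerId)
    String appIdStr = containerId.getApplicationAttemptId()
        .getApplicationId().toString();              // was ConverterUtils.toString(appId)
    return new Path(localDir,
        "usercache/" + user + "/appcache/" + appIdStr + "/" + containerIdStr);
  }
}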
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerRelaunch.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerRelaunch.java @@ -65,7 +65,7 @@ public Integer call() { } ContainerId containerId = container.getContainerId(); - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); int ret = -1; Path containerLogDir; try { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java index b9bdcc6c0d7..3cd31b703d5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/RecoveredContainerLaunch.java @@ -68,9 +68,9 @@ public RecoveredContainerLaunch(Context context, Configuration configuration, public Integer call() { int retCode = ExitCode.LOST.getExitCode(); ContainerId containerId = container.getContainerId(); - String appIdStr = ConverterUtils.toString( - containerId.getApplicationAttemptId().getApplicationId()); - String containerIdStr = ConverterUtils.toString(containerId); + String appIdStr = + containerId.getApplicationAttemptId().getApplicationId().toString(); + String containerIdStr = containerId.toString(); dispatcher.getEventHandler().handle(new ContainerEvent(containerId, ContainerEventType.CONTAINER_LAUNCHED)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java index 57cc346d896..65fd9d8ece2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java @@ -56,6 +56,7 @@ import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.SerializedException; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; @@ -295,7 +296,7 @@ private LocalizerStatus createStatus() throws InterruptedException { try { Path localPath = fPath.get(); stat.setLocalPath( - ConverterUtils.getYarnUrlFromPath(localPath)); + 
URL.fromPath(localPath)); stat.setLocalSize( FileUtil.getDU(new File(localPath.getParent().toUri()))); stat.setStatus(ResourceStatusType.FETCH_SUCCESS); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java index 607d0b40866..d2e8e22d459 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalResourceRequest.java @@ -43,7 +43,7 @@ public class LocalResourceRequest */ public LocalResourceRequest(LocalResource resource) throws URISyntaxException { - this(ConverterUtils.getPathFromYarnURL(resource.getResource()), + this(resource.getResource().toPath(), resource.getTimestamp(), resource.getType(), resource.getVisibility(), @@ -133,7 +133,7 @@ public LocalResourceType getType() { @Override public URL getResource() { - return ConverterUtils.getYarnUrlFromPath(loc); + return URL.fromPath(loc); } @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java index b2413add0a5..409cc29f315 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java @@ -79,6 +79,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; @@ -301,7 +302,7 @@ public void recoverLocalizedResources(RecoveredLocalizationState state) trackerState = appEntry.getValue(); if (!trackerState.isEmpty()) { ApplicationId appId = appEntry.getKey(); - String appIdStr = ConverterUtils.toString(appId); + String appIdStr = appId.toString(); LocalResourcesTracker tracker = new LocalResourcesTrackerImpl(user, appId, dispatcher, false, super.getConfig(), stateStore); LocalResourcesTracker oldTracker = appRsrc.putIfAbsent(appIdStr, @@ -442,7 +443,7 @@ private void handleInitApplicationResources(Application app) { String userName = app.getUser(); privateRsrc.putIfAbsent(userName, new LocalResourcesTrackerImpl(userName, null, dispatcher, true, super.getConfig(), stateStore)); - String appIdStr = ConverterUtils.toString(app.getAppId()); + String 
appIdStr = app.getAppId().toString(); appRsrc.putIfAbsent(appIdStr, new LocalResourcesTrackerImpl(app.getUser(), app.getAppId(), dispatcher, false, super.getConfig(), stateStore)); // 1) Signal container init @@ -491,7 +492,7 @@ private void handleInitContainerResources( private void handleContainerResourcesLocalized( ContainerLocalizationEvent event) { Container c = event.getContainer(); - String locId = ConverterUtils.toString(c.getContainerId()); + String locId = c.getContainerId().toString(); localizerTracker.endContainerLocalization(locId); } @@ -528,14 +529,15 @@ private void handleCleanupContainerResources( c.getContainerId())); } } - String locId = ConverterUtils.toString(c.getContainerId()); + String locId = c.getContainerId().toString(); localizerTracker.cleanupPrivLocalizers(locId); // Delete the container directories String userName = c.getUser(); String containerIDStr = c.toString(); - String appIDStr = ConverterUtils.toString( - c.getContainerId().getApplicationAttemptId().getApplicationId()); + String appIDStr = + c.getContainerId().getApplicationAttemptId().getApplicationId() + .toString(); // Try deleting from good local dirs and full local dirs because a dir might // have gone bad while the app was running(disk full). In addition @@ -583,7 +585,7 @@ private void handleDestroyApplicationResources(Application application) { ApplicationId appId = application.getAppId(); String appIDStr = application.toString(); LocalResourcesTracker appLocalRsrcsTracker = - appRsrc.remove(ConverterUtils.toString(appId)); + appRsrc.remove(appId.toString()); if (appLocalRsrcsTracker != null) { for (LocalizedResource rsrc : appLocalRsrcsTracker ) { Path localPath = rsrc.getLocalPath(); @@ -637,7 +639,7 @@ LocalResourcesTracker getLocalResourcesTracker( case PRIVATE: return privateRsrc.get(user); case APPLICATION: - return appRsrc.get(ConverterUtils.toString(appId)); + return appRsrc.get(appId.toString()); } } @@ -977,7 +979,7 @@ private LocalResource findNextResource() { LocalResourceRequest nextRsrc = nRsrc.getRequest(); LocalResource next = recordFactory.newRecordInstance(LocalResource.class); - next.setResource(ConverterUtils.getYarnUrlFromPath(nextRsrc + next.setResource(URL.fromPath(nextRsrc .getPath())); next.setTimestamp(nextRsrc.getTimestamp()); next.setType(nextRsrc.getType()); @@ -1028,8 +1030,8 @@ LocalizerHeartbeatResponse processHeartbeat( try { getLocalResourcesTracker(req.getVisibility(), user, applicationId) .handle( - new ResourceLocalizedEvent(req, ConverterUtils - .getPathFromYarnURL(stat.getLocalPath()), stat.getLocalSize())); + new ResourceLocalizedEvent(req, stat.getLocalPath().toPath(), + stat.getLocalSize())); } catch (URISyntaxException e) { } // unlocking the resource and removing it from scheduled resource @@ -1142,8 +1144,8 @@ public void run() { .setNmPrivateContainerTokens(nmPrivateCTokensPath) .setNmAddr(localizationServerAddress) .setUser(context.getUser()) - .setAppId(ConverterUtils.toString(context.getContainerId() - .getApplicationAttemptId().getApplicationId())) + .setAppId(context.getContainerId() + .getApplicationAttemptId().getApplicationId().toString()) .setLocId(localizerId) .setDirsHandler(dirsHandler) .build()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java 
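// Illustrative sketch, not part of the patch: the per-application tracker map in the
// localization service above is keyed by the canonical ApplicationId string, now taken
// from ApplicationId#toString(). The tracker value type is simplified to Object here.
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.apache.hadoop.yarn.api.records.ApplicationId;

public class TrackerKeySketch {
  private final ConcurrentMap<String, Object> appTrackers = new ConcurrentHashMap<>();

  void register(ApplicationId appId, Object tracker) {
    appTrackers.putIfAbsent(appId.toString(), tracker);  // was ConverterUtils.toString(appId)
  }

  Object lookup(ApplicationId appId) {
    return appTrackers.get(appId.toString());
  }
}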
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java index 2e05dd7abdb..0e732a7ce58 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/event/LocalizerResourceRequestEvent.java @@ -37,7 +37,7 @@ public class LocalizerResourceRequestEvent extends LocalizerEvent { public LocalizerResourceRequestEvent(LocalizedResource resource, LocalResourceVisibility vis, LocalizerContext context, String pattern) { super(LocalizerEventType.REQUEST_RESOURCE_LOCALIZATION, - ConverterUtils.toString(context.getContainerId())); + context.getContainerId().toString()); this.vis = vis; this.context = context; this.resource = resource; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java index 682b2726d1e..b034e7a209f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java @@ -211,7 +211,7 @@ boolean verifyAccess() throws IOException { final Path remotePath; try { - remotePath = ConverterUtils.getPathFromYarnURL(resource.getResource()); + remotePath = resource.getResource().toPath(); } catch (URISyntaxException e) { throw new IOException("Invalid resource", e); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java index ba7836a040c..c70fa5b4c78 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java @@ -164,7 +164,7 @@ public AppLogAggregatorImpl(Dispatcher dispatcher, this.conf = conf; this.delService = deletionService; this.appId = appId; - this.applicationId = ConverterUtils.toString(appId); + this.applicationId = appId.toString(); this.userUgi = userUgi; this.dirsHandler = dirsHandler; this.remoteNodeLogFileForApp = remoteNodeLogFileForApp; diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java index 8bd20402b4d..5fe27134954 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMLeveldbStateStoreService.java @@ -186,7 +186,7 @@ public List loadContainersState() if (idEndPos < 0) { throw new IOException("Unable to determine container in key: " + key); } - ContainerId containerId = ConverterUtils.toContainerId( + ContainerId containerId = ContainerId.fromString( key.substring(CONTAINERS_KEY_PREFIX.length(), idEndPos)); String keyPrefix = key.substring(0, idEndPos+1); RecoveredContainerState rcs = loadContainerState(containerId, @@ -654,7 +654,7 @@ private RecoveredUserResources loadUserLocalizedResources( throw new IOException("Unable to determine appID in resource key: " + key); } - ApplicationId appId = ConverterUtils.toApplicationId( + ApplicationId appId = ApplicationId.fromString( key.substring(appIdStartPos, appIdEndPos)); userResources.appTrackerStates.put(appId, loadResourceTrackerState(iter, key.substring(0, appIdEndPos+1))); @@ -822,7 +822,7 @@ public RecoveredNMTokensState loadNMTokensState() throws IOException { ApplicationAttemptId.appAttemptIdStrPrefix)) { ApplicationAttemptId attempt; try { - attempt = ConverterUtils.toApplicationAttemptId(key); + attempt = ApplicationAttemptId.fromString(key); } catch (IllegalArgumentException e) { throw new IOException("Bad application master key state for " + fullKey, e); @@ -926,7 +926,7 @@ private static void loadContainerToken(RecoveredContainerTokensState state, ContainerId containerId; Long expTime; try { - containerId = ConverterUtils.toContainerId(containerIdStr); + containerId = ContainerId.fromString(containerIdStr); expTime = Long.parseLong(asString(value)); } catch (IllegalArgumentException e) { throw new IOException("Bad container token state for " + key, e); @@ -988,7 +988,7 @@ public RecoveredLogDeleterState loadLogDeleterState() throws IOException { String appIdStr = fullKey.substring(logDeleterKeyPrefixLength); ApplicationId appId = null; try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (IllegalArgumentException e) { LOG.warn("Skipping unknown log deleter key " + fullKey); continue; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java index 21cf1f27c24..21c3c064040 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/NodeManagerBuilderUtils.java @@ -28,7 +28,7 @@ public class NodeManagerBuilderUtils { public static ResourceLocalizationSpec newResourceLocalizationSpec( LocalResource rsrc, Path path) { - URL local = ConverterUtils.getYarnUrlFromPath(path); + URL local = URL.fromPath(path); ResourceLocalizationSpec resourceLocalizationSpec = Records.newRecord(ResourceLocalizationSpec.class); resourceLocalizationSpec.setDestinationDirectory(local); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java index 80d4db24988..5a7dba7ebc1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java @@ -27,6 +27,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.Shell; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.util.ConverterUtils; /** @@ -69,7 +70,7 @@ public static String getProcessId(Path path) throws IOException { // On Windows, pid is expected to be a container ID, so find first // line that parses successfully as a container ID. try { - ConverterUtils.toContainerId(temp); + ContainerId.fromString(temp); processId = temp; break; } catch (Exception e) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java index bc90d8e40f3..2783b18699c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java @@ -76,10 +76,9 @@ public ApplicationBlock(Context nmContext, Configuration conf) { @Override protected void render(Block html) { - ApplicationId applicationID = null; + ApplicationId applicationID; try { - applicationID = ConverterUtils.toApplicationId(this.recordFactory, - $(APPLICATION_ID)); + applicationID = ApplicationId.fromString($(APPLICATION_ID)); } catch (IllegalArgumentException e) { html.p()._("Invalid Application Id " + $(APPLICATION_ID))._(); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java index 2fd6b2cdf11..3e5f4d2e49d 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java @@ -92,7 +92,7 @@ protected void render(Block html) { ContainerId containerId; try { - containerId = ConverterUtils.toContainerId($(CONTAINER_ID)); + containerId = ContainerId.fromString($(CONTAINER_ID)); } catch (IllegalArgumentException ex) { html.h1("Invalid container ID: " + $(CONTAINER_ID)); return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java index 319f49be5a6..35e75939f30 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsUtils.java @@ -78,8 +78,8 @@ static List getContainerLogDirs(ContainerId containerId, List containerLogDirs = new ArrayList(logDirs.size()); for (String logDir : logDirs) { logDir = new File(logDir).toURI().getPath(); - String appIdStr = ConverterUtils.toString(containerId - .getApplicationAttemptId().getApplicationId()); + String appIdStr = containerId + .getApplicationAttemptId().getApplicationId().toString(); File appLogDir = new File(logDir, appIdStr); containerLogDirs.add(new File(appLogDir, containerId.toString())); } @@ -160,7 +160,7 @@ private static void checkState(ContainerState state) { public static FileInputStream openLogFileForRead(String containerIdStr, File logFile, Context context) throws IOException { - ContainerId containerId = ConverterUtils.toContainerId(containerIdStr); + ContainerId containerId = ContainerId.fromString(containerIdStr); ApplicationId applicationId = containerId.getApplicationAttemptId() .getApplicationId(); String user = context.getApplications().get( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java index f4367bcb892..a1e0bc77108 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java @@ -63,7 +63,7 @@ public ContainerBlock(Context nmContext) { protected void render(Block html) { ContainerId containerID; try { - containerID = ConverterUtils.toContainerId($(CONTAINER_ID)); + containerID = ContainerId.fromString($(CONTAINER_ID)); } catch (IllegalArgumentException e) { html.p()._("Invalid containerId " + $(CONTAINER_ID))._(); return; diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java index efc0e7e6d96..3a30392ee5e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java @@ -181,7 +181,7 @@ public ContainerInfo getNodeContainer(@javax.ws.rs.core.Context ContainerId containerId = null; init(); try { - containerId = ConverterUtils.toContainerId(id); + containerId = ContainerId.fromString(id); } catch (Exception e) { throw new BadRequestException("invalid container id, " + id); } @@ -224,7 +224,7 @@ public Response getLogs(@PathParam("containerid") String containerIdStr, @QueryParam("size") String size) { ContainerId containerId; try { - containerId = ConverterUtils.toContainerId(containerIdStr); + containerId = ContainerId.fromString(containerIdStr); } catch (IllegalArgumentException ex) { return Response.status(Status.BAD_REQUEST).build(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java index 95e2a6537bb..f55ca810d8d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java @@ -42,14 +42,14 @@ public AppInfo() { } // JAXB needs this public AppInfo(final Application app) { - this.id = ConverterUtils.toString(app.getAppId()); + this.id = app.getAppId().toString(); this.state = app.getApplicationState().toString(); this.user = app.getUser(); this.containerids = new ArrayList(); Map appContainers = app.getContainers(); for (ContainerId containerId : appContainers.keySet()) { - String containerIdStr = ConverterUtils.toString(containerId); + String containerIdStr = containerId.toString(); containerids.add(containerIdStr); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java index 9fb8ebf43e5..5f9b8830a14 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java @@ -118,7 +118,7 @@ public void testClearLocalDirWhenNodeReboot() throws IOException, ContainerId cId = createContainerId(); URL localResourceUri = - 
ConverterUtils.getYarnUrlFromPath(localFS.makeQualified(new Path( + URL.fromPath(localFS.makeQualified(new Path( localResourceDir.getAbsolutePath()))); LocalResource localResource = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java index b3d44f526ef..ee2677ce5d2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java @@ -741,7 +741,7 @@ public void startContainer() ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java index 980c76440a9..b3ad31821d4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java @@ -200,7 +200,7 @@ public static void startContainer(NodeManager nm, ContainerId cId, .getCanonicalHostName(), port); URL localResourceUri = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource localResource = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java index ce7e388590a..e3d3fa6ebe9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/api/protocolrecords/impl/pb/TestPBRecordImpl.java @@ -25,6 +25,7 @@ import java.net.URISyntaxException; import java.util.ArrayList; +import org.apache.hadoop.yarn.api.records.URL; import org.junit.Assert; import org.apache.hadoop.conf.Configuration; @@ -62,8 +63,7 @@ static RecordFactory createPBRecordFactory() { static LocalResource createResource() { LocalResource ret = 
recordFactory.newRecordInstance(LocalResource.class); assertTrue(ret instanceof LocalResourcePBImpl); - ret.setResource(ConverterUtils.getYarnUrlFromPath(new Path( - "hdfs://y.ak:8020/foo/bar"))); + ret.setResource(URL.fromPath(new Path("hdfs://y.ak:8020/foo/bar"))); ret.setSize(4344L); ret.setTimestamp(3141592653589793L); ret.setVisibility(LocalResourceVisibility.PUBLIC); @@ -76,7 +76,7 @@ static LocalResourceStatus createLocalResourceStatus() { assertTrue(ret instanceof LocalResourceStatusPBImpl); ret.setResource(createResource()); ret.setLocalPath( - ConverterUtils.getYarnUrlFromPath( + URL.fromPath( new Path("file:///local/foo/bar"))); ret.setStatus(ResourceStatusType.FETCH_SUCCESS); ret.setLocalSize(4443L); @@ -109,8 +109,8 @@ static LocalizerHeartbeatResponse createLocalizerHeartbeatResponse() ResourceLocalizationSpec resource = recordFactory.newRecordInstance(ResourceLocalizationSpec.class); resource.setResource(rsrc); - resource.setDestinationDirectory(ConverterUtils - .getYarnUrlFromPath(new Path("/tmp" + System.currentTimeMillis()))); + resource.setDestinationDirectory( + URL.fromPath((new Path("/tmp" + System.currentTimeMillis())))); rsrcs.add(resource); ret.setResourceSpecs(rsrcs); System.out.println(resource); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java index 10b9155dd69..1f803b4e672 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java @@ -199,7 +199,7 @@ public void testContainerSetup() throws Exception { ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(file.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); rsrc_alpha.setResource(resource_alpha); @@ -229,8 +229,8 @@ public void testContainerSetup() throws Exception { // Now ascertain that the resources are localised correctly. 
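
The test hunks above and below swap ConverterUtils.getYarnUrlFromPath / getPathFromYarnURL for the URL record's own fromPath and toPath. A minimal sketch of the replacement calls follows, assuming a hypothetical HDFS path that is not taken from this patch:

import java.net.URISyntaxException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.URL;

public class UrlPathRoundTrip {
  public static void main(String[] args) throws URISyntaxException {
    // Hypothetical HDFS path, used only for illustration.
    Path path = new Path("hdfs://namenode:8020/user/test/resource.jar");

    // Replacement for ConverterUtils.getYarnUrlFromPath(path).
    URL url = URL.fromPath(path);

    // Replacement for ConverterUtils.getPathFromYarnURL(url);
    // toPath() still declares URISyntaxException, as the callers in this patch show.
    Path roundTripped = url.toPath();

    System.out.println(url + " -> " + roundTripped);
  }
}
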
ApplicationId appId = cId.getApplicationAttemptId().getApplicationId(); - String appIDStr = ConverterUtils.toString(appId); - String containerIDStr = ConverterUtils.toString(cId); + String appIDStr = appId.toString(); + String containerIDStr = cId.toString(); File userCacheDir = new File(localDir, ContainerLocalizer.USERCACHE); File userDir = new File(userCacheDir, user); File appCache = new File(userDir, ContainerLocalizer.APPCACHE); @@ -288,7 +288,7 @@ public void testContainerLaunchAndStop() throws IOException, recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -395,7 +395,7 @@ protected void testContainerLaunchAndExit(int exitCode) throws IOException, recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -488,7 +488,7 @@ public void testLocalFilesCleanup() throws InterruptedException, // containerLaunchContext.resources = // new HashMap(); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(FileContext.getLocalFSFileContext() + URL.fromPath(FileContext.getLocalFSFileContext() .makeQualified(new Path(file.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); rsrc_alpha.setResource(resource_alpha); @@ -521,8 +521,8 @@ public void testLocalFilesCleanup() throws InterruptedException, ApplicationState.RUNNING); // Now ascertain that the resources are localised correctly. 
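
The surrounding TestContainerManager hunks build localized-cache directory names from appId.toString() and cId.toString() where ConverterUtils.toString(...) was used before. A small sketch of that naming, under an assumed container ID and a hypothetical local-dir root:

import java.io.File;

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;

public class LocalCacheDirNames {
  public static void main(String[] args) {
    // Hypothetical ID in the standard "container_<clusterTs>_<app>_<attempt>_<id>" form.
    ContainerId containerId =
        ContainerId.fromString("container_1465939514170_0001_01_000002");
    ApplicationId appId = containerId.getApplicationAttemptId().getApplicationId();

    // The records' toString() yields the canonical names directly,
    // matching what ConverterUtils.toString(...) returned before.
    File appCacheDir =
        new File("/tmp/nm-local-dir/usercache/test/appcache", appId.toString());
    File containerDir = new File(appCacheDir, containerId.toString());

    System.out.println(containerDir.getPath());
  }
}
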
- String appIDStr = ConverterUtils.toString(appId); - String containerIDStr = ConverterUtils.toString(cId); + String appIDStr = appId.toString(); + String containerIDStr = cId.toString(); File userCacheDir = new File(localDir, ContainerLocalizer.USERCACHE); File userDir = new File(userCacheDir, user); File appCache = new File(userDir, ContainerLocalizer.APPCACHE); @@ -975,7 +975,7 @@ public void testIncreaseContainerResourceWithInvalidResource() throws Exception ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1059,7 +1059,7 @@ public void testChangeContainerResource() throws Exception { ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1181,7 +1181,7 @@ private void testContainerLaunchAndSignal(SignalContainerCommand command) ContainerId cId = createContainerId(0); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java index 61477a7ad56..b7d0e480047 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManagerRecovery.java @@ -415,7 +415,7 @@ public void testContainerResizeRecovery() throws Exception { fileWriter.close(); FileContext localFS = FileContext.getLocalFSFileContext(); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = RecordFactoryProvider .getRecordFactory(null).newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java index cf7ca8db0de..a558338ee37 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java @@ -538,7 +538,7 @@ private void verifyTailErrorLogOnContainerExit(Configuration conf, when(container.getContainerId()).thenReturn(containerId); when(container.getUser()).thenReturn("test"); String relativeContainerLogDir = ContainerLaunch.getRelativeContainerLogDir( - appId.toString(), ConverterUtils.toString(containerId)); + appId.toString(), containerId.toString()); Path containerLogDir = dirsHandler.getLogPathForWrite(relativeContainerLogDir, false); @@ -744,7 +744,7 @@ public void testContainerEnvVariables() throws Exception { // upload the script file so that the container can run it URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -945,7 +945,7 @@ private void internalKillTest(boolean delayed) throws Exception { // upload the script file so that the container can run it URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1284,7 +1284,7 @@ public void testKillProcessGroup() throws Exception { // upload the script file so that the container can run it URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java index 611fc05137c..fac708655f5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java @@ -404,7 +404,7 @@ static ResourceLocalizationSpec getMockRsrc(Random r, when(resourceLocalizationSpec.getResource()).thenReturn(rsrc); when(resourceLocalizationSpec.getDestinationDirectory()). 
- thenReturn(ConverterUtils.getYarnUrlFromPath(p)); + thenReturn(URL.fromPath(p)); return resourceLocalizationSpec; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java index 81446f5a875..13310ad5100 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalResource.java @@ -24,6 +24,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalResourceRequest; import org.apache.hadoop.yarn.util.ConverterUtils; @@ -39,8 +40,10 @@ public class TestLocalResource { static org.apache.hadoop.yarn.api.records.LocalResource getYarnResource(Path p, long size, long timestamp, LocalResourceType type, LocalResourceVisibility state, String pattern) throws URISyntaxException { - org.apache.hadoop.yarn.api.records.LocalResource ret = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(org.apache.hadoop.yarn.api.records.LocalResource.class); - ret.setResource(ConverterUtils.getYarnUrlFromURI(p.toUri())); + org.apache.hadoop.yarn.api.records.LocalResource ret = + RecordFactoryProvider.getRecordFactory(null).newRecordInstance( + org.apache.hadoop.yarn.api.records.LocalResource.class); + ret.setResource(URL.fromURI(p.toUri())); ret.setSize(size); ret.setTimestamp(timestamp); ret.setType(type); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java index c612c14c091..f594d8cf363 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java @@ -945,7 +945,7 @@ public boolean matches(Object o) { // Sigh. 
Thread init of private localizer not accessible Thread.sleep(1000); dispatcher.await(); - String appStr = ConverterUtils.toString(appId); + String appStr = appId.toString(); String ctnrStr = c.getContainerId().toString(); ArgumentCaptor contextCaptor = ArgumentCaptor .forClass(LocalizerStartContext.class); @@ -2144,12 +2144,16 @@ public void testParallelDownloadAttemptsForPublicResource() throws Exception { // removing pending download request. spyService.getPublicLocalizer().pending.clear(); + LocalizerContext lc = mock(LocalizerContext.class); + when(lc.getContainerId()).thenReturn(ContainerId.newContainerId( + ApplicationAttemptId.newInstance(ApplicationId.newInstance(1L, 1), 1), + 1L)); + // Now I need to simulate a race condition wherein Event is added to // dispatcher before resource state changes to either FAILED or LOCALIZED // Hence sending event directly to dispatcher. LocalizerResourceRequestEvent localizerEvent = - new LocalizerResourceRequestEvent(lr, null, - mock(LocalizerContext.class), null); + new LocalizerResourceRequestEvent(lr, null, lc, null); dispatcher1.getEventHandler().handle(localizerEvent); // Waiting for download to start. This should return false as new download @@ -2457,7 +2461,7 @@ public void testFailedDirsResourceRelease() throws Exception { BuilderUtils.newApplicationId(314159265358979L, 3); when(app.getUser()).thenReturn(user); when(app.getAppId()).thenReturn(appId); - when(app.toString()).thenReturn(ConverterUtils.toString(appId)); + when(app.toString()).thenReturn(appId.toString()); // init container. final Container c = getMockContainer(appId, 42, user); @@ -2468,17 +2472,16 @@ public void testFailedDirsResourceRelease() throws Exception { Path usersdir = new Path(tmpDirs.get(i), ContainerLocalizer.USERCACHE); Path userdir = new Path(usersdir, user); Path allAppsdir = new Path(userdir, ContainerLocalizer.APPCACHE); - Path appDir = new Path(allAppsdir, ConverterUtils.toString(appId)); + Path appDir = new Path(allAppsdir, appId.toString()); Path containerDir = - new Path(appDir, ConverterUtils.toString(c.getContainerId())); + new Path(appDir, c.getContainerId().toString()); containerLocalDirs.add(containerDir); appLocalDirs.add(appDir); Path sysDir = new Path(tmpDirs.get(i), ResourceLocalizationService.NM_PRIVATE_DIR); - Path appSysDir = new Path(sysDir, ConverterUtils.toString(appId)); - Path containerSysDir = - new Path(appSysDir, ConverterUtils.toString(c.getContainerId())); + Path appSysDir = new Path(sysDir, appId.toString()); + Path containerSysDir = new Path(appSysDir, c.getContainerId().toString()); nmLocalContainerDirs.add(containerSysDir); nmLocalAppDirs.add(appSysDir); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestAppLogAggregatorImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestAppLogAggregatorImpl.java index 0127923b0d3..f929ca86fda 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestAppLogAggregatorImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestAppLogAggregatorImpl.java @@ -108,10 +108,8 @@ 
public void testAggregatorWithRetentionPolicyDisabledShouldUploadAllFiles() final ContainerId containerId = ContainerId.newContainerId(attemptId, 0); // create artificial log files - final File appLogDir = new File(LOCAL_LOG_DIR, - ConverterUtils.toString(applicationId)); - final File containerLogDir = new File(appLogDir, - ConverterUtils.toString(containerId)); + final File appLogDir = new File(LOCAL_LOG_DIR, applicationId.toString()); + final File containerLogDir = new File(appLogDir, containerId.toString()); containerLogDir.mkdirs(); final Set logFiles = createContainerLogFiles(containerLogDir, 3); @@ -135,9 +133,9 @@ public void testAggregatorWhenNoFileOlderThanRetentionPolicyShouldUploadAll() // create artificial log files final File appLogDir = new File(LOCAL_LOG_DIR, - ConverterUtils.toString(applicationId)); + applicationId.toString()); final File containerLogDir = new File(appLogDir, - ConverterUtils.toString(containerId)); + containerId.toString()); containerLogDir.mkdirs(); final Set logFiles = createContainerLogFiles(containerLogDir, 3); @@ -163,9 +161,9 @@ public void testAggregatorWhenAllFilesOlderThanRetentionShouldUploadNone() // create artificial log files final File appLogDir = new File(LOCAL_LOG_DIR, - ConverterUtils.toString(applicationId)); + applicationId.toString()); final File containerLogDir = new File(appLogDir, - ConverterUtils.toString(containerId)); + containerId.toString()); containerLogDir.mkdirs(); final Set logFiles = createContainerLogFiles(containerLogDir, 3); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java index 3961e1ac518..92c6b805b75 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java @@ -194,7 +194,7 @@ private void verifyLocalFileDeletion( // AppLogDir should be created File app1LogDir = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); app1LogDir.mkdir(); logAggregationService .handle(new LogHandlerAppStartedEvent( @@ -221,7 +221,7 @@ private void verifyLocalFileDeletion( verify(delSrvc).delete(eq(user), eq((Path) null), eq(new Path(app1LogDir.getAbsolutePath()))); - String containerIdStr = ConverterUtils.toString(container11); + String containerIdStr = container11.toString(); File containerLogDir = new File(app1LogDir, containerIdStr); int count = 0; int maxAttempts = 50; @@ -315,7 +315,7 @@ public void testNoLogsUploadedOnAppFinish() throws Exception { logAggregationService.start(); ApplicationId app = BuilderUtils.newApplicationId(1234, 1); - File appLogDir = new File(localLogDir, ConverterUtils.toString(app)); + File appLogDir = new File(localLogDir, app.toString()); appLogDir.mkdir(); LogAggregationContext context = LogAggregationContext.newInstance("HOST*", "sys*"); @@ -352,7 +352,7 @@ public void testNoContainerOnNode() throws Exception { // AppLogDir 
should be created File app1LogDir = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); app1LogDir.mkdir(); logAggregationService .handle(new LogHandlerAppStartedEvent( @@ -402,7 +402,7 @@ public void testMultipleAppsLogAggregation() throws Exception { // AppLogDir should be created File app1LogDir = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); app1LogDir.mkdir(); logAggregationService .handle(new LogHandlerAppStartedEvent( @@ -423,7 +423,7 @@ public void testMultipleAppsLogAggregation() throws Exception { BuilderUtils.newApplicationAttemptId(application2, 1); File app2LogDir = - new File(localLogDir, ConverterUtils.toString(application2)); + new File(localLogDir, application2.toString()); app2LogDir.mkdir(); LogAggregationContext contextWithAMOnly = Records.newRecord(LogAggregationContext.class); @@ -452,7 +452,7 @@ public void testMultipleAppsLogAggregation() throws Exception { BuilderUtils.newApplicationAttemptId(application3, 1); File app3LogDir = - new File(localLogDir, ConverterUtils.toString(application3)); + new File(localLogDir, application3.toString()); app3LogDir.mkdir(); LogAggregationContext contextWithAMAndFailed = Records.newRecord(LogAggregationContext.class); @@ -583,7 +583,7 @@ public void testVerifyAndCreateRemoteDirsFailure() BuilderUtils.newApplicationId(System.currentTimeMillis(), (int) (Math.random() * 1000)); File appLogDir = - new File(localLogDir, ConverterUtils.toString(appId2)); + new File(localLogDir, appId2.toString()); appLogDir.mkdir(); logAggregationService.handle(new LogHandlerAppStartedEvent(appId2, this.user, null, this.acls, contextWithAMAndFailed)); @@ -758,7 +758,7 @@ public void testLogAggregationCreateDirsFailsWithoutKillingNM() (int) (Math.random() * 1000)); File appLogDir = - new File(localLogDir, ConverterUtils.toString(appId)); + new File(localLogDir, appId.toString()); appLogDir.mkdir(); Exception e = new RuntimeException("KABOOM!"); @@ -805,7 +805,7 @@ public void testLogAggregationCreateDirsFailsWithoutKillingNM() private void writeContainerLogs(File appLogDir, ContainerId containerId, String[] fileName) throws IOException { // ContainerLogDir should be created - String containerStr = ConverterUtils.toString(containerId); + String containerStr = containerId.toString(); File containerLogDir = new File(appLogDir, containerStr); boolean created = containerLogDir.mkdirs(); LOG.info("Created Dir:" + containerLogDir.getAbsolutePath() + " status :" @@ -943,7 +943,7 @@ private LogFileStatusInLastCycle verifyContainerLogs( Assert.assertTrue("number of containers with logs should be at most " + minNumOfContainers,logMap.size() <= maxNumOfContainers); for (ContainerId cId : expectedContainerIds) { - String containerStr = ConverterUtils.toString(cId); + String containerStr = cId.toString(); Map thisContainerMap = logMap.remove(containerStr); Assert.assertEquals(numOfLogsPerContainer, thisContainerMap.size()); for (String fileType : logFiles) { @@ -998,7 +998,7 @@ public void testLogAggregationForRealContainerLaunch() throws IOException, ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); @@ -1435,7 +1435,7 @@ public void testLogAggregationServiceWithPatterns() throws Exception { // has only 
logs from stdout and syslog // AppLogDir should be created File appLogDir1 = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); appLogDir1.mkdir(); logAggregationService.handle(new LogHandlerAppStartedEvent(application1, this.user, null, this.acls, @@ -1460,7 +1460,7 @@ public void testLogAggregationServiceWithPatterns() throws Exception { BuilderUtils.newApplicationAttemptId(application2, 1); File app2LogDir = - new File(localLogDir, ConverterUtils.toString(application2)); + new File(localLogDir, application2.toString()); app2LogDir.mkdir(); LogAggregationContextWithExcludePatterns.setLogAggregationPolicyClassName( AMOnlyLogAggregationPolicy.class.getName()); @@ -1485,7 +1485,7 @@ public void testLogAggregationServiceWithPatterns() throws Exception { ApplicationAttemptId appAttemptId3 = BuilderUtils.newApplicationAttemptId(application3, 1); File app3LogDir = - new File(localLogDir, ConverterUtils.toString(application3)); + new File(localLogDir, application3.toString()); app3LogDir.mkdir(); context1.setLogAggregationPolicyClassName( AMOnlyLogAggregationPolicy.class.getName()); @@ -1510,7 +1510,7 @@ public void testLogAggregationServiceWithPatterns() throws Exception { ApplicationAttemptId appAttemptId4 = BuilderUtils.newApplicationAttemptId(application4, 1); File app4LogDir = - new File(localLogDir, ConverterUtils.toString(application4)); + new File(localLogDir, application4.toString()); app4LogDir.mkdir(); context2.setLogAggregationPolicyClassName( AMOnlyLogAggregationPolicy.class.getName()); @@ -2012,7 +2012,7 @@ private ContainerId finishContainer(ApplicationId application1, containerType); // Simulate log-file creation File appLogDir1 = - new File(localLogDir, ConverterUtils.toString(application1)); + new File(localLogDir, application1.toString()); appLogDir1.mkdir(); writeContainerLogs(appLogDir1, containerId, logFiles); @@ -2123,7 +2123,7 @@ private void testLogAggregationService(boolean retentionSizeLimitation) // AppLogDir should be created File appLogDir = - new File(localLogDir, ConverterUtils.toString(application)); + new File(localLogDir, application.toString()); appLogDir.mkdir(); logAggregationService.handle(new LogHandlerAppStartedEvent(application, this.user, null, this.acls, logAggregationContextWithInterval)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java index 94145e40f61..1b4e3b7d77c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java @@ -210,7 +210,7 @@ public void testContainerKillOnMemoryOverflow() throws IOException, ContainerId cId = ContainerId.newContainerId(appAttemptId, 0); URL resource_alpha = - ConverterUtils.getYarnUrlFromPath(localFS + URL.fromPath(localFS .makeQualified(new Path(scriptFile.getAbsolutePath()))); LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java index 2f409c82463..d254e4be197 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/recovery/TestNMLeveldbStateStoreService.java @@ -374,7 +374,7 @@ public void testStartResourceLocalization() throws IOException { Path appRsrcPath = new Path("hdfs://some/app/resource"); LocalResourcePBImpl rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(appRsrcPath), + URL.fromPath(appRsrcPath), LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION, 123L, 456L); LocalResourceProto appRsrcProto = rsrcPb.getProto(); @@ -407,7 +407,7 @@ public void testStartResourceLocalization() throws IOException { // start some public and private resources Path pubRsrcPath1 = new Path("hdfs://some/public/resource1"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath1), + URL.fromPath(pubRsrcPath1), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto1 = rsrcPb.getProto(); @@ -416,7 +416,7 @@ public void testStartResourceLocalization() throws IOException { pubRsrcLocalPath1); Path pubRsrcPath2 = new Path("hdfs://some/public/resource2"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath2), + URL.fromPath(pubRsrcPath2), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto2 = rsrcPb.getProto(); @@ -425,7 +425,7 @@ public void testStartResourceLocalization() throws IOException { pubRsrcLocalPath2); Path privRsrcPath = new Path("hdfs://some/private/resource"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(privRsrcPath), + URL.fromPath(privRsrcPath), LocalResourceType.PATTERN, LocalResourceVisibility.PRIVATE, 789L, 680L, "*pattern*"); LocalResourceProto privRsrcProto = rsrcPb.getProto(); @@ -470,7 +470,7 @@ public void testFinishResourceLocalization() throws IOException { Path appRsrcPath = new Path("hdfs://some/app/resource"); LocalResourcePBImpl rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(appRsrcPath), + URL.fromPath(appRsrcPath), LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION, 123L, 456L); LocalResourceProto appRsrcProto = rsrcPb.getProto(); @@ -510,7 +510,7 @@ public void testFinishResourceLocalization() throws IOException { // start some public and private resources Path pubRsrcPath1 = new Path("hdfs://some/public/resource1"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath1), + URL.fromPath(pubRsrcPath1), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto1 = rsrcPb.getProto(); @@ -519,7 +519,7 @@ public void testFinishResourceLocalization() throws IOException { pubRsrcLocalPath1); Path pubRsrcPath2 = new 
Path("hdfs://some/public/resource2"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath2), + URL.fromPath(pubRsrcPath2), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto2 = rsrcPb.getProto(); @@ -528,7 +528,7 @@ public void testFinishResourceLocalization() throws IOException { pubRsrcLocalPath2); Path privRsrcPath = new Path("hdfs://some/private/resource"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(privRsrcPath), + URL.fromPath(privRsrcPath), LocalResourceType.PATTERN, LocalResourceVisibility.PRIVATE, 789L, 680L, "*pattern*"); LocalResourceProto privRsrcProto = rsrcPb.getProto(); @@ -589,7 +589,7 @@ public void testRemoveLocalizedResource() throws IOException { Path appRsrcPath = new Path("hdfs://some/app/resource"); LocalResourcePBImpl rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(appRsrcPath), + URL.fromPath(appRsrcPath), LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION, 123L, 456L); LocalResourceProto appRsrcProto = rsrcPb.getProto(); @@ -619,7 +619,7 @@ public void testRemoveLocalizedResource() throws IOException { // add public and private resources and remove some Path pubRsrcPath1 = new Path("hdfs://some/public/resource1"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath1), + URL.fromPath(pubRsrcPath1), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto1 = rsrcPb.getProto(); @@ -635,7 +635,7 @@ public void testRemoveLocalizedResource() throws IOException { stateStore.finishResourceLocalization(null, null, pubLocalizedProto1); Path pubRsrcPath2 = new Path("hdfs://some/public/resource2"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(pubRsrcPath2), + URL.fromPath(pubRsrcPath2), LocalResourceType.FILE, LocalResourceVisibility.PUBLIC, 789L, 135L); LocalResourceProto pubRsrcProto2 = rsrcPb.getProto(); @@ -652,7 +652,7 @@ public void testRemoveLocalizedResource() throws IOException { stateStore.removeLocalizedResource(null, null, pubRsrcLocalPath2); Path privRsrcPath = new Path("hdfs://some/private/resource"); rsrcPb = (LocalResourcePBImpl) LocalResource.newInstance( - ConverterUtils.getYarnUrlFromPath(privRsrcPath), + URL.fromPath(privRsrcPath), LocalResourceType.PATTERN, LocalResourceVisibility.PRIVATE, 789L, 680L, "*pattern*"); LocalResourceProto privRsrcProto = rsrcPb.getProto(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java index b90c1be0b58..3f71179fca2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java @@ -249,7 +249,7 @@ private void writeContainerLogs(Context nmContext, containerLogDir.mkdirs(); for (String fileType : new String[] { "stdout", "stderr", "syslog" }) { Writer writer = new FileWriter(new 
File(containerLogDir, fileType)); - writer.write(ConverterUtils.toString(containerId) + "\n Hello " + writer.write(containerId.toString() + "\n Hello " + fileType + "!"); writer.close(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java index 18239f16633..7ec8f274051 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java @@ -40,6 +40,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.util.NodeHealthScriptRunner; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; @@ -280,7 +281,7 @@ public void testNodeHelper(String path, String media) throws JSONException, verifyNodeContainerInfo( conInfo.getJSONObject(i), nmContext.getContainers().get( - ConverterUtils.toContainerId(conInfo.getJSONObject(i).getString( + ContainerId.fromString(conInfo.getJSONObject(i).getString( "id")))); } } @@ -316,7 +317,7 @@ public void testNodeSingleContainersHelper(String media) assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); JSONObject json = response.getEntity(JSONObject.class); verifyNodeContainerInfo(json.getJSONObject("container"), nmContext - .getContainers().get(ConverterUtils.toContainerId(id))); + .getContainers().get(ContainerId.fromString(id))); } } @@ -449,7 +450,7 @@ public void testNodeSingleContainerXML() throws JSONException, Exception { NodeList nodes = dom.getElementsByTagName("container"); assertEquals("incorrect number of elements", 1, nodes.getLength()); verifyContainersInfoXML(nodes, - nmContext.getContainers().get(ConverterUtils.toContainerId(id))); + nmContext.getContainers().get(ContainerId.fromString(id))); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java index 3634107c678..e36d96b0bb7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java @@ -1345,7 +1345,7 @@ private static void removeApplication(Configuration conf, String applicationId) rmStore.init(conf); rmStore.start(); try { - ApplicationId removeAppId = ConverterUtils.toApplicationId(applicationId); + ApplicationId removeAppId = ApplicationId.fromString(applicationId); LOG.info("Deleting application " + removeAppId + " from state store"); 
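
The ResourceManager and RM state-store hunks in this stretch parse IDs back from strings with the record classes' fromString methods (ApplicationId, ApplicationAttemptId, and NodeId a little further down) instead of ConverterUtils. A brief sketch with hypothetical ID strings in the standard formats:

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.NodeId;

public class RecordIdParsing {
  public static void main(String[] args) {
    // Hypothetical IDs in the standard YARN string formats.
    ApplicationId appId =
        ApplicationId.fromString("application_1465939514170_0001");
    ApplicationAttemptId attemptId =
        ApplicationAttemptId.fromString("appattempt_1465939514170_0001_000001");
    NodeId nodeId = NodeId.fromString("worker01.example.com:45454");

    // Each record also round-trips through toString() to the same canonical string.
    System.out.println(appId + " | " + attemptId + " | " + nodeId);
  }
}

Since ConverterUtils.toString(...) simply delegated to the records' toString(), the string forms themselves are unchanged by this patch; only the entry points move onto the record classes.
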
rmStore.removeApplication(removeAppId); LOG.info("Application is deleted from state store"); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java index a6f096930e5..02f90ddb9af 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/LeveldbRMStateStore.java @@ -499,7 +499,7 @@ private int loadRMApp(RMState rmState, LeveldbIterator iter, String appIdStr, private ApplicationStateData createApplicationState(String appIdStr, byte[] data) throws IOException { - ApplicationId appId = ConverterUtils.toApplicationId(appIdStr); + ApplicationId appId = ApplicationId.fromString(appIdStr); ApplicationStateDataPBImpl appState = new ApplicationStateDataPBImpl( ApplicationStateDataProto.parseFrom(data)); @@ -545,8 +545,7 @@ ApplicationAttemptStateData loadRMAppAttemptState( private ApplicationAttemptStateData createAttemptState(String itemName, byte[] data) throws IOException { - ApplicationAttemptId attemptId = - ConverterUtils.toApplicationAttemptId(itemName); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString(itemName); ApplicationAttemptStateDataPBImpl attemptState = new ApplicationAttemptStateDataPBImpl( ApplicationAttemptStateDataProto.parseFrom(data)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java index 9afbf6d2045..9e05f6d9ff6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/ZKRMStateStore.java @@ -514,7 +514,7 @@ private synchronized void loadRMAppState(RMState rmState) throws Exception { if (LOG.isDebugEnabled()) { LOG.debug("Loading application from znode: " + childNodeName); } - ApplicationId appId = ConverterUtils.toApplicationId(childNodeName); + ApplicationId appId = ApplicationId.fromString(childNodeName); ApplicationStateDataPBImpl appState = new ApplicationStateDataPBImpl( ApplicationStateDataProto.parseFrom(childData)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java index 045c7bdc957..65491026a94 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/DynamicResourceConfiguration.java @@ -133,7 +133,7 @@ public Map getNodeResourceMap() { = new HashMap (); for (String node : nodes) { - NodeId nid = ConverterUtils.toNodeId(node); + NodeId nid = NodeId.fromString(node); int vcores = getVcoresPerNode(node); int memory = getMemoryPerNode(node); int overCommitTimeout = getOverCommitTimeoutPerNode(node); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java index 512149385d5..95f81d43dd6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmcontainer/RMContainerImpl.java @@ -346,7 +346,7 @@ public String getLogURL() { logURL.append(WebAppUtils.getHttpSchemePrefix(rmContext .getYarnConfiguration())); logURL.append(WebAppUtils.getRunningLogURL( - container.getNodeHttpAddress(), ConverterUtils.toString(containerId), + container.getNodeHttpAddress(), containerId.toString(), user)); return logURL.toString(); } finally { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java index b4d792143a6..305f1d5acc3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java @@ -83,8 +83,8 @@ protected void renderData(Block html) { } AppInfo app = new AppInfo(appReport); - ApplicationAttemptId appAttemptId = - ConverterUtils.toApplicationAttemptId(app.getCurrentAppAttemptId()); + ApplicationAttemptId appAttemptId = ApplicationAttemptId.fromString( + app.getCurrentAppAttemptId()); String queuePercent = "N/A"; String clusterPercent = "N/A"; if(appReport.getApplicationResourceUsageReport() != null) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java index de2a23f786f..0f1a590dfbe 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebAppFilter.java @@ -220,7 +220,7 @@ private String ahsRedirectPath(String uri, RMWebApp rmWebApp) { break; case "appattempt": try{ - appAttemptId = ConverterUtils.toApplicationAttemptId(parts[3]); + appAttemptId = ApplicationAttemptId.fromString(parts[3]); } catch (IllegalArgumentException e) { LOG.debug("Error parsing {} as an ApplicationAttemptId", parts[3], e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java index d05d95202de..878bf65ad14 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java @@ -113,6 +113,7 @@ import org.apache.hadoop.yarn.api.records.ReservationRequestInterpreter; import org.apache.hadoop.yarn.api.records.ReservationRequests; import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; @@ -373,7 +374,7 @@ public NodeInfo getNode(@PathParam("nodeId") String nodeId) { if (sched == null) { throw new NotFoundException("Null ResourceScheduler instance"); } - NodeId nid = ConverterUtils.toNodeId(nodeId); + NodeId nid = NodeId.fromString(nodeId); RMNode ni = this.rm.getRMContext().getRMNodes().get(nid); boolean isInactive = false; if (ni == null) { @@ -1467,9 +1468,7 @@ protected ApplicationSubmissionContext createAppSubmissionContext( String error = "Could not parse application id " + newApp.getApplicationId(); try { - appid = - ConverterUtils.toApplicationId(recordFactory, - newApp.getApplicationId()); + appid = ApplicationId.fromString(newApp.getApplicationId()); } catch (Exception e) { throw new BadRequestException(error); } @@ -1553,7 +1552,7 @@ protected ContainerLaunchContext createContainerLaunchContext( LocalResourceInfo l = entry.getValue(); LocalResource lr = LocalResource.newInstance( - ConverterUtils.getYarnUrlFromURI(l.getUrl()), l.getType(), + URL.fromURI(l.getUrl()), l.getType(), l.getVisibility(), l.getSize(), l.getTimestamp()); hlr.put(entry.getKey(), lr); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java index e8c8bca5551..55bf999b047 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppAttemptInfo.java @@ -67,7 +67,7 @@ public AppAttemptInfo(ResourceManager rm, RMAppAttempt attempt, String user, this.nodeId = masterContainer.getNodeId().toString(); this.logsLink = WebAppUtils.getRunningLogURL(schemePrefix + masterContainer.getNodeHttpAddress(), - ConverterUtils.toString(masterContainer.getId()), user); + masterContainer.getId().toString(), user); nodesBlacklistedBySystem = StringUtils.join(attempt.getAMBlacklistManager() diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java index 63b601deab0..c5c02a806de 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java @@ -165,8 +165,7 @@ public AppInfo(ResourceManager rm, RMApp app, Boolean hasAccess, this.amContainerLogsExist = true; this.amContainerLogs = WebAppUtils.getRunningLogURL( schemePrefix + masterContainer.getNodeHttpAddress(), - ConverterUtils.toString(masterContainer.getId()), - app.getUser()); + masterContainer.getId().toString(), app.getUser()); this.amHostHttpAddress = masterContainer.getNodeHttpAddress(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java index b109639a55a..af342df6057 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java @@ -218,7 +218,7 @@ public void testRefreshNodesResourceWithFileSystemBasedConfigurationProvider() fail("Should not get any exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); @@ -257,7 +257,7 @@ public void testRefreshNodesResourceWithResourceReturnInRegistration() fail("Should not get any exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); @@ -307,7 +307,7 @@ public void testRefreshNodesResourceWithResourceReturnInHeartbeat() fail("Should not get 
any exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); @@ -355,7 +355,7 @@ public void testResourcePersistentForNMRegistrationWithNewResource() fail("Should not get any exceptions"); } - NodeId nid = ConverterUtils.toNodeId("h1:1234"); + NodeId nid = NodeId.fromString("h1:1234"); RMNode ni = rm.getRMContext().getRMNodes().get(nid); Resource resource = ni.getTotalCapability(); Assert.assertEquals("", resource.toString()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java index d46ed051c91..758bbae3c2a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStoreTestBase.java @@ -183,7 +183,7 @@ protected RMAppAttempt storeAttempt(RMStateStore store, RMAppAttemptMetrics mockRmAppAttemptMetrics = mock(RMAppAttemptMetrics.class); Container container = new ContainerPBImpl(); - container.setId(ConverterUtils.toContainerId(containerIdStr)); + container.setId(ContainerId.fromString(containerIdStr)); RMAppAttempt mockAttempt = mock(RMAppAttempt.class); when(mockAttempt.getAppAttemptId()).thenReturn(attemptId); when(mockAttempt.getMasterContainer()).thenReturn(container); @@ -227,8 +227,8 @@ void testRMAppStateStore(RMStateStoreHelper stateStoreHelper, ClientToAMTokenSecretManagerInRM clientToAMTokenMgr = new ClientToAMTokenSecretManagerInRM(); - ApplicationAttemptId attemptId1 = ConverterUtils - .toApplicationAttemptId("appattempt_1352994193343_0001_000001"); + ApplicationAttemptId attemptId1 = ApplicationAttemptId.fromString( + "appattempt_1352994193343_0001_000001"); ApplicationId appId1 = attemptId1.getApplicationId(); storeApp(store, appId1, submitTime, startTime); verifier.afterStoreApp(store, appId1); @@ -245,8 +245,8 @@ void testRMAppStateStore(RMStateStoreHelper stateStoreHelper, .getMasterContainer().getId(); String appAttemptIdStr2 = "appattempt_1352994193343_0001_000002"; - ApplicationAttemptId attemptId2 = - ConverterUtils.toApplicationAttemptId(appAttemptIdStr2); + ApplicationAttemptId attemptId2 = ApplicationAttemptId.fromString( + appAttemptIdStr2); // create application token and client token key for attempt2 Token appAttemptToken2 = @@ -259,8 +259,8 @@ void testRMAppStateStore(RMStateStoreHelper stateStoreHelper, appAttemptToken2, clientTokenKey2, dispatcher) .getMasterContainer().getId(); - ApplicationAttemptId attemptIdRemoved = ConverterUtils - .toApplicationAttemptId("appattempt_1352994193343_0002_000001"); + ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.fromString( + "appattempt_1352994193343_0002_000001"); ApplicationId appIdRemoved = attemptIdRemoved.getApplicationId(); storeApp(store, appIdRemoved, submitTime, startTime); storeAttempt(store, attemptIdRemoved, diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java index a51ccb53463..61088e1f645 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestFSRMStateStore.java @@ -183,7 +183,7 @@ public void testFSRMStateStore() throws Exception { (FileSystemRMStateStore) fsTester.getRMStateStore(); String appAttemptIdStr3 = "appattempt_1352994193343_0001_000003"; ApplicationAttemptId attemptId3 = - ConverterUtils.toApplicationAttemptId(appAttemptIdStr3); + ApplicationAttemptId.fromString(appAttemptIdStr3); Path appDir = fsTester.store.getAppDir(attemptId3.getApplicationId().toString()); Path tempAppAttemptFile = @@ -364,7 +364,7 @@ protected void modifyAppState() throws Exception { // imitate appAttemptFile1 is still .new, but old one is deleted String appAttemptIdStr1 = "appattempt_1352994193343_0001_000001"; ApplicationAttemptId attemptId1 = - ConverterUtils.toApplicationAttemptId(appAttemptIdStr1); + ApplicationAttemptId.fromString(appAttemptIdStr1); Path appDir = fsTester.store.getAppDir(attemptId1.getApplicationId().toString()); Path appAttemptFile1 = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java index 7df31cf43ee..19d30641e3d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestZKRMStateStore.java @@ -34,6 +34,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.Container; +import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl; @@ -399,14 +400,14 @@ public void testFencedState() throws Exception { // Add a new attempt ClientToAMTokenSecretManagerInRM clientToAMTokenMgr = new ClientToAMTokenSecretManagerInRM(); - ApplicationAttemptId attemptId = ConverterUtils - .toApplicationAttemptId("appattempt_1234567894321_0001_000001"); + ApplicationAttemptId attemptId = ApplicationAttemptId.fromString( + "appattempt_1234567894321_0001_000001"); SecretKey clientTokenMasterKey = clientToAMTokenMgr.createMasterKey(attemptId); RMAppAttemptMetrics mockRmAppAttemptMetrics = mock(RMAppAttemptMetrics.class); Container container = new ContainerPBImpl(); - 
container.setId(ConverterUtils.toContainerId("container_1234567891234_0001_01_000001")); + container.setId(ContainerId.fromString("container_1234567891234_0001_01_000001")); RMAppAttempt mockAttempt = mock(RMAppAttempt.class); when(mockAttempt.getAppAttemptId()).thenReturn(attemptId); when(mockAttempt.getMasterContainer()).thenReturn(container); @@ -491,8 +492,8 @@ public void testDuplicateRMAppDeletion() throws Exception { TestDispatcher dispatcher = new TestDispatcher(); store.setRMDispatcher(dispatcher); - ApplicationAttemptId attemptIdRemoved = ConverterUtils - .toApplicationAttemptId("appattempt_1352994193343_0002_000001"); + ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.fromString( + "appattempt_1352994193343_0002_000001"); ApplicationId appIdRemoved = attemptIdRemoved.getApplicationId(); storeApp(store, appIdRemoved, submitTime, startTime); storeAttempt(store, attemptIdRemoved, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java index c7ef8fadbd7..682ed75391e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java @@ -57,6 +57,7 @@ import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.LocalResource; @@ -65,6 +66,7 @@ import org.apache.hadoop.yarn.api.records.LogAggregationContext; import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.ReservationId; +import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.MockNM; @@ -859,7 +861,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia) RMApp app = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appId)); + .get(ApplicationId.fromString(appId)); assertEquals(appName, app.getName()); assertEquals(webserviceUserName, app.getUser()); assertEquals(2, app.getMaxAppAttempts()); @@ -877,8 +879,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia) Map appLRs = ctx.getLocalResources(); assertTrue(appLRs.containsKey(lrKey)); LocalResource exampleLR = appLRs.get(lrKey); - assertEquals(ConverterUtils.getYarnUrlFromURI(y.getUrl()), - exampleLR.getResource()); + assertEquals(URL.fromURI(y.getUrl()), exampleLR.getResource()); assertEquals(y.getSize(), exampleLR.getSize()); assertEquals(y.getTimestamp(), exampleLR.getTimestamp()); assertEquals(y.getType(), exampleLR.getType()); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java index 36e24ecf189..4e26bd1ce6e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java @@ -50,6 +50,7 @@ import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; @@ -236,11 +237,11 @@ public void testDelegationTokenAuth() throws Exception { boolean appExists = rm.getRMContext().getRMApps() - .containsKey(ConverterUtils.toApplicationId(appid)); + .containsKey(ApplicationId.fromString(appid)); assertTrue(appExists); RMApp actualApp = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appid)); + .get(ApplicationId.fromString(appid)); String owner = actualApp.getUser(); assertEquals("client", owner); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java index 2f6a02287c4..249e8250497 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebappAuthentication.java @@ -37,6 +37,7 @@ import org.apache.hadoop.minikdc.MiniKdc; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.KerberosTestUtils; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; @@ -236,11 +237,11 @@ private void testAnonymousSimpleUser() throws Exception { assertEquals(Status.ACCEPTED.getStatusCode(), conn.getResponseCode()); boolean appExists = rm.getRMContext().getRMApps() - .containsKey(ConverterUtils.toApplicationId(appid)); + .containsKey(ApplicationId.fromString(appid)); assertTrue(appExists); RMApp actualApp = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appid)); + .get(ApplicationId.fromString(appid)); String owner = actualApp.getUser(); 
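Illustrative only, not part of this patch: a minimal sketch of the attempt-id format these state-store and web-service tests exercise, using a sample value in the same "appattempt_<clusterTimestamp>_<appId>_<attemptNumber>" form:

    import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
    import org.apache.hadoop.yarn.api.records.ApplicationId;

    public class AttemptIdSketch {
      public static void main(String[] args) {
        ApplicationAttemptId attemptId =
            ApplicationAttemptId.fromString("appattempt_1352994193343_0001_000001");
        // The parent application id is recovered from the parsed attempt id.
        ApplicationId appId = attemptId.getApplicationId();
        System.out.println(appId);  // application_1352994193343_0001
      }
    }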
assertEquals( rm.getConfig().get(CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER, @@ -259,11 +260,11 @@ private void testAnonymousSimpleUser() throws Exception { conn.getInputStream(); appExists = rm.getRMContext().getRMApps() - .containsKey(ConverterUtils.toApplicationId(appid)); + .containsKey(ApplicationId.fromString(appid)); assertTrue(appExists); actualApp = rm.getRMContext().getRMApps() - .get(ConverterUtils.toApplicationId(appid)); + .get(ApplicationId.fromString(appid)); owner = actualApp.getUser(); assertEquals("client", owner); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java index 231ca7241af..958b54e623b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java @@ -481,7 +481,7 @@ private static ApplicationId parseApplicationId(String appIdStr) { ApplicationId appId = null; if (appIdStr.startsWith(ApplicationId.appIdStrPrefix)) { try { - appId = ConverterUtils.toApplicationId(appIdStr); + appId = ApplicationId.fromString(appIdStr); } catch (IllegalArgumentException e) { appId = null; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java index db241a891d2..884b5cd18aa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/EntityGroupPlugInForTest.java @@ -34,15 +34,16 @@ class EntityGroupPlugInForTest extends TimelineEntityGroupPlugin { public Set getTimelineEntityGroupId(String entityType, NameValuePair primaryFilter, Collection secondaryFilters) { - ApplicationId appId - = ConverterUtils.toApplicationId(primaryFilter.getValue().toString()); + ApplicationId appId = ApplicationId.fromString( + primaryFilter.getValue().toString()); return Sets.newHashSet(getStandardTimelineGroupId(appId)); } @Override public Set getTimelineEntityGroupId(String entityId, String entityType) { - ApplicationId appId = ConverterUtils.toApplicationId(entityId); + ApplicationId appId = ApplicationId.fromString( + entityId); return Sets.newHashSet(getStandardTimelineGroupId(appId)); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java index d6baab67370..1c12f36192b 
100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java @@ -68,7 +68,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils { private static final String SAMPLE_APP_PREFIX_CACHE_TEST = "1234_000"; private static final int CACHE_TEST_CACHE_SIZE = 5; - + private static final String TEST_SUMMARY_LOG_FILE_NAME = EntityGroupFSTimelineStore.SUMMARY_LOG_PREFIX + "test"; private static final String TEST_DOMAIN_LOG_FILE_NAME @@ -117,7 +117,7 @@ public static void setupClass() throws Exception { sampleAppIds = new ArrayList<>(CACHE_TEST_CACHE_SIZE + 1); for (int i = 0; i < CACHE_TEST_CACHE_SIZE + 1; i++) { - ApplicationId appId = ConverterUtils.toApplicationId( + ApplicationId appId = ApplicationId.fromString( ConverterUtils.APPLICATION_PREFIX + "_" + SAMPLE_APP_PREFIX_CACHE_TEST + i); sampleAppIds.add(appId);
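Illustrative only, not part of this patch: the remaining ConverterUtils helpers replaced in this section map onto the record classes in the same way. A minimal sketch, with the host:port and path values taken as placeholders from the test hunks above:

    import java.net.URI;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.yarn.api.records.NodeId;
    import org.apache.hadoop.yarn.api.records.URL;

    public class ConverterMigrationSketch {
      public static void main(String[] args) throws Exception {
        // ConverterUtils.toNodeId(...)           -> NodeId.fromString(...)
        NodeId nodeId = NodeId.fromString("h1:1234");
        // ConverterUtils.getYarnUrlFromPath(...) -> URL.fromPath(...)
        URL urlFromPath = URL.fromPath(new Path("hdfs://some/public/resource1"));
        // ConverterUtils.getYarnUrlFromURI(...)  -> URL.fromURI(...)
        URL urlFromUri = URL.fromURI(new URI("hdfs://some/private/resource"));
        System.out.println(nodeId + " " + urlFromPath + " " + urlFromUri);
      }
    }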