From a83fb61ac07c0468cbc7a38526e92683883dd932 Mon Sep 17 00:00:00 2001 From: Vinod Kumar Vavilapalli Date: Tue, 4 Jun 2013 04:05:50 +0000 Subject: [PATCH] YARN-635. Renamed YarnRemoteException to YarnException. Contributed by Siddharth Seth. MAPREDUCE-5301. Updated MR code to work with YARN-635 changes of renaming YarnRemoteException to YarnException. Contributed by Siddharth Seth git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1489283 13f79535-47bb-0310-9956-ffa450edef68 --- hadoop-mapreduce-project/CHANGES.txt | 3 ++ .../hadoop/mapred/LocalContainerLauncher.java | 4 +- .../mapred/TaskAttemptListenerImpl.java | 4 +- .../jobhistory/JobHistoryCopyService.java | 4 +- .../jobhistory/JobHistoryEventHandler.java | 22 ++++---- .../hadoop/mapreduce/v2/app/MRAppMaster.java | 16 +++--- .../v2/app/commit/CommitterEventHandler.java | 8 +-- .../mapreduce/v2/app/job/impl/JobImpl.java | 4 +- .../v2/app/job/impl/TaskAttemptImpl.java | 6 +-- .../app/launcher/ContainerLauncherImpl.java | 4 +- .../v2/app/local/LocalContainerAllocator.java | 6 +-- .../mapreduce/v2/app/rm/RMCommunicator.java | 8 +-- .../v2/app/rm/RMContainerAllocator.java | 8 +-- .../v2/app/rm/RMContainerRequestor.java | 8 +-- .../v2/app/speculate/DefaultSpeculator.java | 10 ++-- .../v2/app/webapp/AMWebServices.java | 16 +++--- .../TestJobHistoryEventHandler.java | 4 +- .../apache/hadoop/mapreduce/v2/app/MRApp.java | 8 +-- .../mapreduce/v2/app/MRAppBenchmark.java | 4 +- .../mapreduce/v2/app/TestMRAppMaster.java | 4 +- .../v2/app/TestRMContainerAllocator.java | 4 +- .../mapreduce/v2/app/TestStagingCleanup.java | 4 +- .../app/commit/TestCommitterEventHandler.java | 4 +- .../launcher/TestContainerLauncherImpl.java | 6 +-- .../local/TestLocalContainerAllocator.java | 16 +++--- .../hadoop/mapreduce/TypeConverter.java | 18 +++---- .../hadoop/mapreduce/v2/util/MRApps.java | 6 +-- .../hadoop/mapreduce/v2/TestRPCFactories.java | 8 +-- .../mapreduce/v2/TestRecordFactory.java | 6 +-- .../mapreduce/v2/hs/CachedHistoryStorage.java | 10 ++-- .../hadoop/mapreduce/v2/hs/CompletedJob.java | 6 +-- .../mapreduce/v2/hs/HistoryFileManager.java | 6 +-- .../hadoop/mapreduce/v2/hs/JobHistory.java | 6 +-- .../mapreduce/v2/hs/JobHistoryServer.java | 4 +- .../mapreduce/v2/hs/webapp/HsWebServices.java | 4 +- .../org/apache/hadoop/mapred/ClientCache.java | 4 +- .../hadoop/mapred/ClientServiceDelegate.java | 20 +++---- .../hadoop/mapred/ResourceMgrDelegate.java | 22 ++++---- .../org/apache/hadoop/mapred/YARNRunner.java | 12 ++--- .../hadoop/mapred/TestClientRedirect.java | 4 +- .../mapred/TestClientServiceDelegate.java | 12 ++--- .../hadoop/mapred/TestNetworkedJob.java | 6 +-- .../mapred/TestResourceMgrDelegate.java | 6 +-- .../mapreduce/v2/MiniMRYarnCluster.java | 6 +-- hadoop-yarn-project/CHANGES.txt | 3 ++ ...ception.java => YarnRuntimeException.java} | 12 +++-- .../apache/hadoop/yarn/api/AMRMProtocol.java | 14 ++--- .../hadoop/yarn/api/ClientRMProtocol.java | 50 ++++++++--------- .../hadoop/yarn/api/ContainerManager.java | 14 ++--- .../hadoop/yarn/api/RMAdminProtocol.java | 14 ++--- ...emoteException.java => YarnException.java} | 10 ++-- .../providers/RecordFactoryProvider.java | 10 ++-- .../distributedshell/ApplicationMaster.java | 12 ++--- .../applications/distributedshell/Client.java | 14 ++--- .../UnmanagedAMLauncher.java | 8 +-- .../apache/hadoop/yarn/client/AMRMClient.java | 14 ++--- .../hadoop/yarn/client/AMRMClientAsync.java | 16 +++--- .../hadoop/yarn/client/AMRMClientImpl.java | 12 ++--- .../apache/hadoop/yarn/client/NMClient.java | 14 ++--- 
.../hadoop/yarn/client/NMClientAsync.java | 8 +-- .../hadoop/yarn/client/NMClientImpl.java | 26 ++++----- .../apache/hadoop/yarn/client/RMAdmin.java | 14 ++--- .../apache/hadoop/yarn/client/YarnClient.java | 54 +++++++++---------- .../hadoop/yarn/client/YarnClientImpl.java | 28 +++++----- .../yarn/client/cli/ApplicationCLI.java | 14 ++--- .../hadoop/yarn/client/cli/NodeCLI.java | 10 ++-- .../hadoop/yarn/client/TestAMRMClient.java | 10 ++-- .../yarn/client/TestAMRMClientAsync.java | 4 +- .../hadoop/yarn/client/TestNMClient.java | 20 +++---- .../hadoop/yarn/client/TestNMClientAsync.java | 8 +-- .../hadoop/yarn/client/TestYarnClient.java | 6 +-- .../pb/client/AMRMProtocolPBClientImpl.java | 8 +-- .../client/ClientRMProtocolPBClientImpl.java | 26 ++++----- .../client/ContainerManagerPBClientImpl.java | 8 +-- .../client/RMAdminProtocolPBClientImpl.java | 14 ++--- .../pb/service/AMRMProtocolPBServiceImpl.java | 8 +-- .../ClientRMProtocolPBServiceImpl.java | 26 ++++----- .../ContainerManagerPBServiceImpl.java | 8 +-- .../service/RMAdminProtocolPBServiceImpl.java | 14 ++--- .../hadoop/yarn/event/AsyncDispatcher.java | 4 +- .../impl/pb/RecordFactoryPBImpl.java | 12 ++--- .../impl/pb/RpcClientFactoryPBImpl.java | 16 +++--- .../impl/pb/RpcServerFactoryPBImpl.java | 22 ++++---- .../factory/providers/RpcFactoryProvider.java | 10 ++-- .../org/apache/hadoop/yarn/ipc/RPCUtil.java | 24 ++++----- .../org/apache/hadoop/yarn/ipc/YarnRPC.java | 4 +- .../logaggregation/AggregatedLogFormat.java | 4 +- .../yarn/security/AdminACLsManager.java | 4 +- .../client/RMDelegationTokenIdentifier.java | 6 +-- .../hadoop/yarn/service/CompositeService.java | 4 +- .../state/InvalidStateTransitonException.java | 4 +- .../org/apache/hadoop/yarn/util/Apps.java | 4 +- .../hadoop/yarn/webapp/WebAppException.java | 4 +- .../hadoop/yarn/TestContainerLaunchRPC.java | 14 ++--- .../java/org/apache/hadoop/yarn/TestRPC.java | 14 ++--- .../apache/hadoop/yarn/TestRPCFactories.java | 14 ++--- .../apache/hadoop/yarn/TestRecordFactory.java | 4 +- .../hadoop/yarn/TestRpcFactoryProvider.java | 8 +-- .../TestYarnUncaughtExceptionHandler.java | 4 +- .../apache/hadoop/yarn/ipc/TestRPCUtil.java | 12 ++--- .../yarn/util/TestCompositeService.java | 10 ++-- .../yarn/server/api/ResourceTracker.java | 6 +-- .../client/ResourceTrackerPBClientImpl.java | 6 +-- .../service/ResourceTrackerPBServiceImpl.java | 6 +-- .../apache/hadoop/yarn/TestRPCFactories.java | 14 ++--- .../apache/hadoop/yarn/TestRecordFactory.java | 4 +- .../nodemanager/LocalDirsHandlerService.java | 10 ++-- .../yarn/server/nodemanager/NodeManager.java | 6 +-- .../nodemanager/NodeStatusUpdaterImpl.java | 18 +++---- .../nodemanager/api/LocalizationProtocol.java | 4 +- .../LocalizationProtocolPBClientImpl.java | 4 +- .../LocalizationProtocolPBServiceImpl.java | 4 +- .../ContainerManagerImpl.java | 16 +++--- .../InvalidContainerException.java | 4 +- .../NMNotYetReadyException.java | 4 +- .../launcher/ContainersLauncher.java | 4 +- .../localizer/ContainerLocalizer.java | 6 +-- .../ResourceLocalizationService.java | 10 ++-- .../logaggregation/LogAggregationService.java | 20 +++---- .../server/nodemanager/webapp/WebServer.java | 4 +- .../nodemanager/DummyContainerManager.java | 6 +-- .../server/nodemanager/LocalRMInterface.java | 6 +-- .../nodemanager/MockNodeStatusUpdater.java | 6 +-- .../TestContainerManagerWithLCE.java | 12 ++--- .../server/nodemanager/TestEventFlow.java | 4 +- .../TestLocalDirsHandlerService.java | 4 +- .../server/nodemanager/TestNodeManager.java | 4 +- 
.../nodemanager/TestNodeManagerReboot.java | 6 +-- .../nodemanager/TestNodeManagerResync.java | 10 ++-- .../nodemanager/TestNodeManagerShutdown.java | 6 +-- .../nodemanager/TestNodeStatusUpdater.java | 34 ++++++------ .../server/nodemanager/TestRPCFactories.java | 8 +-- .../server/nodemanager/TestRecordFactory.java | 4 +- .../BaseContainerManagerTest.java | 8 +-- .../TestContainerManager.java | 22 ++++---- .../localizer/TestContainerLocalizer.java | 4 +- .../TestLocalCacheDirectoryManager.java | 4 +- .../TestResourceLocalizationService.java | 4 +- .../TestLogAggregationService.java | 10 ++-- .../monitor/TestContainersMonitor.java | 4 +- .../server/resourcemanager/AdminService.java | 16 +++--- .../ApplicationMasterService.java | 10 ++-- .../resourcemanager/ClientRMService.java | 28 +++++----- .../resourcemanager/NodesListManager.java | 4 +- .../server/resourcemanager/RMAppManager.java | 4 +- .../resourcemanager/ResourceManager.java | 20 +++---- .../ResourceTrackerService.java | 10 ++-- .../amlauncher/AMLauncher.java | 8 +-- .../resourcemanager/rmapp/RMAppImpl.java | 6 +-- .../InvalidResourceRequestException.java | 4 +- .../server/resourcemanager/Application.java | 12 ++--- .../yarn/server/resourcemanager/MockRM.java | 4 +- .../server/resourcemanager/NodeManager.java | 12 ++--- .../resourcemanager/TestAMAuthorization.java | 10 ++-- .../resourcemanager/TestAppManager.java | 8 +-- .../resourcemanager/TestApplicationACLs.java | 4 +- .../TestApplicationMasterLauncher.java | 8 +-- .../resourcemanager/TestClientRMService.java | 18 +++---- .../resourcemanager/TestClientRMTokens.java | 16 +++--- .../resourcemanager/TestResourceManager.java | 14 ++--- .../TestRMNMRPCResponseId.java | 4 +- .../capacity/TestCapacityScheduler.java | 4 +- .../scheduler/fifo/TestFifoScheduler.java | 4 +- .../security/TestClientTokens.java | 18 +++---- .../hadoop/yarn/server/MiniYARNCluster.java | 22 ++++---- .../server/TestContainerManagerSecurity.java | 16 +++--- .../server/webproxy/AppReportFetcher.java | 6 +-- .../yarn/server/webproxy/WebAppProxy.java | 8 +-- .../server/webproxy/WebAppProxyServer.java | 4 +- .../server/webproxy/WebAppProxyServlet.java | 6 +-- 170 files changed, 859 insertions(+), 849 deletions(-) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/{YarnException.java => YarnRuntimeException.java} (75%) rename hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/{YarnRemoteException.java => YarnException.java} (80%) diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt index 43048b5298f..0dfd7426bdd 100644 --- a/hadoop-mapreduce-project/CHANGES.txt +++ b/hadoop-mapreduce-project/CHANGES.txt @@ -499,6 +499,9 @@ Release 2.1.0-beta - UNRELEASED MAPREDUCE-5297. Updated MR App since BuilderUtils is no longer public after YARN-748. (Jian He via vinodkv) + MAPREDUCE-5301. Updated MR code to work with YARN-635 changes of renaming + YarnRemoteException to YarnException. (Siddharth Seth via vinodkv) + BREAKDOWN OF HADOOP-8562 SUBTASKS MAPREDUCE-4739. Some MapReduce tests fail to find winutils. 
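The two renames recorded above are org.apache.hadoop.yarn.YarnException -> org.apache.hadoop.yarn.YarnRuntimeException (the unchecked wrapper used inside the daemons) and org.apache.hadoop.yarn.exceptions.YarnRemoteException -> org.apache.hadoop.yarn.exceptions.YarnException (the checked exception raised across RPC); the hunks that follow are mechanical substitutions of those names. A minimal sketch of the unchecked pattern the MR app-master files converge on, assuming only classes this patch touches; the enclosing class and its queue field are illustrative and not part of the patch:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.hadoop.yarn.YarnRuntimeException; // pre-patch name: org.apache.hadoop.yarn.YarnException

class EventQueueSketch {
  private final BlockingQueue<Object> eventQueue = new LinkedBlockingQueue<Object>();

  // Recurring pattern in the hunks below (LocalContainerLauncher, JobHistoryEventHandler,
  // CommitterEventHandler, ContainerLauncherImpl, ...): a local, unrecoverable failure is
  // rethrown as the unchecked YarnRuntimeException where the old YarnException was used.
  public void handle(Object event) {
    try {
      eventQueue.put(event);
    } catch (InterruptedException e) {
      throw new YarnRuntimeException(e);
    }
  }
}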
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java index f3d3b1eebea..d5243107ef3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java @@ -50,7 +50,7 @@ import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher; import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent; import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.service.AbstractService; @@ -129,7 +129,7 @@ public class LocalContainerLauncher extends AbstractService implements try { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); // FIXME? YarnException is "for runtime exceptions only" + throw new YarnRuntimeException(e); // FIXME? YarnRuntimeException is "for runtime exceptions only" } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java index 38a43454ee0..e30d41401b6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java @@ -52,7 +52,7 @@ import org.apache.hadoop.mapreduce.v2.app.security.authorize.MRAMPolicyProvider; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.authorize.PolicyProvider; import org.apache.hadoop.util.StringInterner; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.service.CompositeService; /** @@ -134,7 +134,7 @@ public class TaskAttemptListenerImpl extends CompositeService server.start(); this.address = NetUtils.getConnectAddress(server); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryCopyService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryCopyService.java index b4873de0e90..e4f6ffa17ad 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryCopyService.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryCopyService.java @@ -30,7 +30,7 
@@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.TypeConverter; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.service.CompositeService; @@ -76,7 +76,7 @@ public class JobHistoryCopyService extends CompositeService implements HistoryEv //TODO should we parse on a background thread??? parse(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } super.start(); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java index 296559e7ccf..6c671bf4853 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java @@ -52,7 +52,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.service.AbstractService; @@ -131,7 +131,7 @@ public class JobHistoryEventHandler extends AbstractService JobHistoryUtils.getHistoryIntermediateDoneDirForUser(conf); } catch (IOException e) { LOG.error("Failed while getting the configured log directories", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } //Check for the existence of the history staging dir. Maybe create it. @@ -144,7 +144,7 @@ public class JobHistoryEventHandler extends AbstractService } catch (IOException e) { LOG.error("Failed while checking for/creating history staging path: [" + stagingDirPath + "]", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } //Check for the existence of intermediate done dir. @@ -178,13 +178,13 @@ public class JobHistoryEventHandler extends AbstractService + ". Either set to true or pre-create this directory with" + " appropriate permissions"; LOG.error(message); - throw new YarnException(message); + throw new YarnRuntimeException(message); } } } catch (IOException e) { LOG.error("Failed checking for the existance of history intermediate " + "done directory: [" + doneDirPath + "]"); - throw new YarnException(e); + throw new YarnRuntimeException(e); } //Check/create user directory under intermediate done dir. 
@@ -196,7 +196,7 @@ public class JobHistoryEventHandler extends AbstractService } catch (IOException e) { LOG.error("Error creating user intermediate history done directory: [ " + doneDirPrefixPath + "]", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } // Maximum number of unflushed completion-events that can stay in the queue @@ -457,7 +457,7 @@ public class JobHistoryEventHandler extends AbstractService eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -479,7 +479,7 @@ public class JobHistoryEventHandler extends AbstractService } catch (IOException ioe) { LOG.error("Error JobHistoryEventHandler in handleEvent: " + event, ioe); - throw new YarnException(ioe); + throw new YarnRuntimeException(ioe); } } @@ -501,7 +501,7 @@ public class JobHistoryEventHandler extends AbstractService } catch (IOException e) { LOG.error("Error writing History Event: " + event.getHistoryEvent(), e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } if (event.getHistoryEvent().getEventType() == EventType.JOB_SUBMITTED) { @@ -523,7 +523,7 @@ public class JobHistoryEventHandler extends AbstractService mi.getJobIndexInfo().setJobStatus(JobState.SUCCEEDED.toString()); closeEventWriter(event.getJobID()); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -539,7 +539,7 @@ public class JobHistoryEventHandler extends AbstractService mi.getJobIndexInfo().setJobStatus(jucEvent.getStatus()); closeEventWriter(event.getJobID()); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java index 2a3f9c17986..eb4e6c3eefb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java @@ -113,7 +113,7 @@ import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.ClusterInfo; import org.apache.hadoop.yarn.SystemClock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; @@ -301,7 +301,7 @@ public class MRAppMaster extends CompositeService { } } } catch (IOException e) { - throw new YarnException("Error while initializing", e); + throw new YarnRuntimeException("Error while initializing", e); } if (errorHappenedShutDown) { @@ -442,7 +442,7 @@ public class MRAppMaster extends CompositeService { .getOutputFormatClass(), conf); committer = outputFormat.getOutputCommitter(taskContext); } catch (Exception e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } else { committer = ReflectionUtils.newInstance(conf.getClass( @@ -623,7 +623,7 @@ public class MRAppMaster extends CompositeService { + jobTokenFile); currentUser.addCredentials(fsTokens); // For use by AppMaster itself. 
} catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -662,19 +662,19 @@ public class MRAppMaster extends CompositeService { } catch (InstantiationException ex) { LOG.error("Can't make a speculator -- check " + MRJobConfig.MR_AM_JOB_SPECULATOR, ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (IllegalAccessException ex) { LOG.error("Can't make a speculator -- check " + MRJobConfig.MR_AM_JOB_SPECULATOR, ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (InvocationTargetException ex) { LOG.error("Can't make a speculator -- check " + MRJobConfig.MR_AM_JOB_SPECULATOR, ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (NoSuchMethodException ex) { LOG.error("Can't make a speculator -- check " + MRJobConfig.MR_AM_JOB_SPECULATOR, ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java index 93326cdafc8..123b1f9162c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java @@ -48,7 +48,7 @@ import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.service.AbstractService; @@ -103,7 +103,7 @@ public class CommitterEventHandler extends AbstractService endCommitSuccessFile = MRApps.getEndJobCommitSuccessFile(conf, user, jobId); endCommitFailureFile = MRApps.getEndJobCommitFailureFile(conf, user, jobId); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -143,7 +143,7 @@ public class CommitterEventHandler extends AbstractService try { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -218,7 +218,7 @@ public class CommitterEventHandler extends AbstractService handleTaskAbort((CommitterTaskAbortEvent) event); break; default: - throw new YarnException("Unexpected committer event " + throw new YarnRuntimeException("Unexpected committer event " + event.toString()); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java index 367b0280845..4645e861f12 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java @@ -113,7 +113,7 @@ import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.Clock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeReport; @@ -1466,7 +1466,7 @@ public class JobImpl implements org.apache.hadoop.mapreduce.v2.app.job.Job, job.conf, job.remoteJobSubmitDir); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } return allTaskSplitMetaInfo; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java index 5fb4f89808b..2d9f9d43d7c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java @@ -111,7 +111,7 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.Clock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.Container; @@ -733,7 +733,7 @@ public abstract class TaskAttemptImpl implements initialAppClasspath); } } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } // Shell @@ -1207,7 +1207,7 @@ public abstract class TaskAttemptImpl implements case SUCCEEDED: return TaskAttemptState.SUCCEEDED; default: - throw new YarnException("Attempt to convert invalid " + throw new YarnRuntimeException("Attempt to convert invalid " + "stateMachineTaskAttemptState to externalTaskAttemptState: " + smState); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java index 5911f3cd865..25b0f85b1a7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java @@ -48,7 +48,7 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import 
org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ContainerManager; import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest; import org.apache.hadoop.yarn.api.protocolrecords.StartContainerResponse; @@ -423,7 +423,7 @@ public class ContainerLauncherImpl extends AbstractService implements try { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java index 441dbcb789b..d1d36dd1e8f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java @@ -35,7 +35,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssigned import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator; import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent; import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.Container; @@ -109,7 +109,7 @@ public class LocalContainerAllocator extends RMCommunicator LOG.error("Could not contact RM after " + retryInterval + " milliseconds."); eventHandler.handle(new JobEvent(this.getJob().getID(), JobEventType.INTERNAL_ERROR)); - throw new YarnException("Could not contact RM after " + + throw new YarnRuntimeException("Could not contact RM after " + retryInterval + " milliseconds."); } // Throw this up to the caller, which may decide to ignore it and @@ -122,7 +122,7 @@ public class LocalContainerAllocator extends RMCommunicator // this application must clean itself up. 
eventHandler.handle(new JobEvent(this.getJob().getID(), JobEventType.JOB_AM_REBOOT)); - throw new YarnException("Resource Manager doesn't recognize AttemptId: " + + throw new YarnRuntimeException("Resource Manager doesn't recognize AttemptId: " + this.getContext().getApplicationID()); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java index 0e4cfe05857..369b5255aea 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java @@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal; import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest; @@ -163,7 +163,7 @@ public abstract class RMCommunicator extends AbstractService LOG.info("maxContainerCapability: " + maxContainerCapability.getMemory()); } catch (Exception are) { LOG.error("Exception while registering", are); - throw new YarnException(are); + throw new YarnRuntimeException(are); } } @@ -237,7 +237,7 @@ public abstract class RMCommunicator extends AbstractService Thread.sleep(rmPollInterval); try { heartbeat(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { LOG.error("Error communicating with RM: " + e.getMessage() , e); return; } catch (Exception e) { @@ -273,7 +273,7 @@ public abstract class RMCommunicator extends AbstractService try { currentUser = UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } // CurrentUser should already have AMToken loaded. 
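At RPC boundaries the checked exception is renamed the other way: protocol calls such as AMRMProtocol.allocate() now surface org.apache.hadoop.yarn.exceptions.YarnException where callers previously caught YarnRemoteException, and the callers translate it to IOException as before. A minimal sketch of that caller side, mirroring the RMContainerRequestor and TestLocalContainerAllocator hunks below; the enclosing class and method name here are illustrative:

import java.io.IOException;
import org.apache.hadoop.yarn.api.AMRMProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.exceptions.YarnException; // pre-patch name: YarnRemoteException

class AllocateCallSketch {
  // Callers that used to catch YarnRemoteException from the scheduler proxy now catch
  // YarnException and wrap it in IOException, as the RMContainerRequestor hunk below does.
  AllocateResponse allocate(AMRMProtocol scheduler, AllocateRequest request)
      throws IOException {
    try {
      return scheduler.allocate(request);
    } catch (YarnException e) {
      throw new IOException(e);
    }
  }
}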
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java index a03dfdd6715..d21c6fa52b0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java @@ -58,7 +58,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptKillEvent; import org.apache.hadoop.util.StringInterner; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ContainerExitStatus; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.Container; @@ -274,7 +274,7 @@ public class RMContainerAllocator extends RMContainerRequestor try { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -563,7 +563,7 @@ public class RMContainerAllocator extends RMContainerRequestor LOG.error("Could not contact RM after " + retryInterval + " milliseconds."); eventHandler.handle(new JobEvent(this.getJob().getID(), JobEventType.INTERNAL_ERROR)); - throw new YarnException("Could not contact RM after " + + throw new YarnRuntimeException("Could not contact RM after " + retryInterval + " milliseconds."); } // Throw this up to the caller, which may decide to ignore it and @@ -575,7 +575,7 @@ public class RMContainerAllocator extends RMContainerRequestor // this application must clean itself up. eventHandler.handle(new JobEvent(this.getJob().getID(), JobEventType.JOB_AM_REBOOT)); - throw new YarnException("Resource Manager doesn't recognize AttemptId: " + + throw new YarnRuntimeException("Resource Manager doesn't recognize AttemptId: " + this.getContext().getApplicationID()); } int newHeadRoom = getAvailableResources() != null ? 
getAvailableResources().getMemory() : 0; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java index 431fd15b910..59f113a4cf0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java @@ -36,14 +36,14 @@ import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.client.ClientService; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; @@ -137,7 +137,7 @@ public abstract class RMContainerRequestor extends RMCommunicator { MRJobConfig.DEFAULT_MR_AM_IGNORE_BLACKLISTING_BLACKLISTED_NODE_PERCENT); LOG.info("maxTaskFailuresPerNode is " + maxTaskFailuresPerNode); if (blacklistDisablePercent < -1 || blacklistDisablePercent > 100) { - throw new YarnException("Invalid blacklistDisablePercent: " + throw new YarnRuntimeException("Invalid blacklistDisablePercent: " + blacklistDisablePercent + ". 
Should be an integer between 0 and 100 or -1 to disabled"); } @@ -152,7 +152,7 @@ public abstract class RMContainerRequestor extends RMCommunicator { AllocateResponse allocateResponse; try { allocateResponse = scheduler.allocate(allocateRequest); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } lastResponseID = allocateResponse.getResponseId(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java index b2e437b10d1..3ecb94e4938 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java @@ -48,7 +48,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus; import org.apache.hadoop.yarn.Clock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.service.AbstractService; @@ -129,16 +129,16 @@ public class DefaultSpeculator extends AbstractService implements estimator.contextualize(conf, context); } catch (InstantiationException ex) { LOG.error("Can't make a speculation runtime extimator", ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (IllegalAccessException ex) { LOG.error("Can't make a speculation runtime extimator", ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (InvocationTargetException ex) { LOG.error("Can't make a speculation runtime extimator", ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (NoSuchMethodException ex) { LOG.error("Can't make a speculation runtime extimator", ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } return estimator; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java index 1e671aa6225..e9a1b1c11fb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java @@ -58,7 +58,7 @@ import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo; import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.webapp.BadRequestException; import org.apache.hadoop.yarn.webapp.NotFoundException; @@ -102,8 
+102,8 @@ public class AMWebServices { Job job; try { jobId = MRApps.toJobID(jid); - } catch (YarnException e) { - // TODO: after MAPREDUCE-2793 YarnException is probably not expected here + } catch (YarnRuntimeException e) { + // TODO: after MAPREDUCE-2793 YarnRuntimeException is probably not expected here // anymore but keeping it for now just in case other stuff starts failing. // Also, the webservice should ideally return BadRequest (HTTP:400) when // the id is malformed instead of NotFound (HTTP:404). The webserver on @@ -132,8 +132,8 @@ public class AMWebServices { Task task; try { taskID = MRApps.toTaskID(tid); - } catch (YarnException e) { - // TODO: after MAPREDUCE-2793 YarnException is probably not expected here + } catch (YarnRuntimeException e) { + // TODO: after MAPREDUCE-2793 YarnRuntimeException is probably not expected here // anymore but keeping it for now just in case other stuff starts failing. // Also, the webservice should ideally return BadRequest (HTTP:400) when // the id is malformed instead of NotFound (HTTP:404). The webserver on @@ -165,8 +165,8 @@ public class AMWebServices { TaskAttempt ta; try { attemptId = MRApps.toTaskAttemptID(attId); - } catch (YarnException e) { - // TODO: after MAPREDUCE-2793 YarnException is probably not expected here + } catch (YarnRuntimeException e) { + // TODO: after MAPREDUCE-2793 YarnRuntimeException is probably not expected here // anymore but keeping it for now just in case other stuff starts failing. // Also, the webservice should ideally return BadRequest (HTTP:400) when // the id is malformed instead of NotFound (HTTP:404). The webserver on @@ -304,7 +304,7 @@ public class AMWebServices { if (type != null && !type.isEmpty()) { try { ttype = MRApps.taskType(type); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { throw new BadRequestException("tasktype must be either m or r"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java index 8311f13b2a8..422d8cd5f20 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java @@ -44,7 +44,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -254,7 +254,7 @@ public class TestJobHistoryEventHandler { return testWorkDir.getAbsolutePath(); } catch (Exception e) { LOG.warn("Could not cleanup", e); - throw new YarnException("could not cleanup test dir", e); + throw new YarnRuntimeException("could not cleanup test dir", e); } } diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java index 8a5b67952b9..96854a6ecba 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java @@ -87,7 +87,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.ClusterInfo; import org.apache.hadoop.yarn.SystemClock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Container; @@ -197,7 +197,7 @@ public class MRApp extends MRAppMaster { FileContext.getLocalFSFileContext().delete(testAbsPath, true); } catch (Exception e) { LOG.warn("COULD NOT CLEANUP: " + testAbsPath, e); - throw new YarnException("could not cleanup test dir", e); + throw new YarnRuntimeException("could not cleanup test dir", e); } } @@ -215,7 +215,7 @@ public class MRApp extends MRAppMaster { FileSystem fs = getFileSystem(conf); fs.mkdirs(stagingDir); } catch (Exception e) { - throw new YarnException("Error creating staging dir", e); + throw new YarnRuntimeException("Error creating staging dir", e); } super.init(conf); @@ -404,7 +404,7 @@ public class MRApp extends MRAppMaster { try { currentUser = UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } Job newJob = new TestJob(getJobId(), getAttemptID(), conf, getDispatcher().getEventHandler(), diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java index a6dc5667e3f..4b6b0e39e73 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java @@ -33,7 +33,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssigned import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator; import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent; import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; @@ -126,7 +126,7 @@ public class MRAppBenchmark { try { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @Override diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java index 98d5c0f2703..0153b95fc08 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java @@ -58,7 +58,7 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -450,7 +450,7 @@ class MRAppMasterTest extends MRAppMaster { try { this.currentUser = UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java index d5a1edc1894..63bd7b33a7f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java @@ -74,7 +74,7 @@ import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.ClusterInfo; import org.apache.hadoop.yarn.SystemClock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; @@ -1429,7 +1429,7 @@ public class TestRMContainerAllocator { super.heartbeat(); } catch (Exception e) { LOG.error("error in heartbeat ", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } List result diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java index 8a0cf168e8b..a8cedc6176e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java @@ -49,7 +49,7 @@ import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler; import org.apache.hadoop.mapreduce.v2.util.MRApps; import 
org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -292,7 +292,7 @@ import org.junit.Test; try { currentUser = UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } Job newJob = new TestJob(getJobId(), getAttemptID(), conf, getDispatcher().getEventHandler(), diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java index 167c291bd9b..283ff680e57 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java @@ -60,7 +60,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.JobCommitCompletedEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.JobCommitFailedEvent; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Event; @@ -295,7 +295,7 @@ public class TestCommitterEventHandler { when(mockContext.getEventHandler()).thenReturn(waitForItHandler); when(mockContext.getClock()).thenReturn(mockClock); - doThrow(new YarnException("Intentional Failure")).when(mockCommitter) + doThrow(new YarnRuntimeException("Intentional Failure")).when(mockCommitter) .commitJob(any(JobContext.class)); handler.init(conf); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java index 6bae722d005..141e0b0bc30 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java @@ -61,7 +61,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.event.Event; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ 
-460,14 +460,14 @@ public class TestContainerLauncherImpl { } @SuppressWarnings("serial") - private static class ContainerException extends YarnRemoteException { + private static class ContainerException extends YarnException { public ContainerException(String message) { super(message); } @Override - public YarnRemoteException getCause() { + public YarnException getCause() { return null; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java index bb2c87475b8..35dfaa7a8f0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java @@ -30,14 +30,14 @@ import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.client.ClientService; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.yarn.ClusterInfo; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.junit.Assert; import org.junit.Test; @@ -55,13 +55,13 @@ public class TestLocalContainerAllocator { try { lca.heartbeat(); Assert.fail("heartbeat was supposed to throw"); - } catch (YarnRemoteException e) { - // YarnRemoteException is expected + } catch (YarnException e) { + // YarnException is expected } finally { lca.stop(); } - // verify YarnException is thrown when the retry interval has expired + // verify YarnRuntimeException is thrown when the retry interval has expired conf.setLong(MRJobConfig.MR_AM_TO_RM_WAIT_INTERVAL_MS, 0); lca = new StubbedLocalContainerAllocator(); lca.init(conf); @@ -69,8 +69,8 @@ public class TestLocalContainerAllocator { try { lca.heartbeat(); Assert.fail("heartbeat was supposed to throw"); - } catch (YarnException e) { - // YarnException is expected + } catch (YarnRuntimeException e) { + // YarnRuntimeException is expected } finally { lca.stop(); } @@ -99,7 +99,7 @@ public class TestLocalContainerAllocator { try { when(scheduler.allocate(isA(AllocateRequest.class))) .thenThrow(RPCUtil.getRemoteException(new IOException("forcefail"))); - } catch (YarnRemoteException e) { + } catch (YarnException e) { } catch (IOException e) { } return scheduler; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java index 892eb87df52..6b610b23e98 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java @@ -41,7 +41,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.util.MRApps; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; @@ -98,7 +98,7 @@ public class TypeConverter { case REDUCE: return org.apache.hadoop.mapreduce.TaskType.REDUCE; default: - throw new YarnException("Unrecognized task type: " + taskType); + throw new YarnRuntimeException("Unrecognized task type: " + taskType); } } @@ -110,7 +110,7 @@ public class TypeConverter { case REDUCE: return TaskType.REDUCE; default: - throw new YarnException("Unrecognized task type: " + taskType); + throw new YarnRuntimeException("Unrecognized task type: " + taskType); } } @@ -145,7 +145,7 @@ public class TypeConverter { case UNASSIGNED: return TaskAttemptState.STARTING; default: - throw new YarnException("Unrecognized State: " + state); + throw new YarnRuntimeException("Unrecognized State: " + state); } } @@ -164,7 +164,7 @@ public class TypeConverter { case CLEANUP: return Phase.CLEANUP; } - throw new YarnException("Unrecognized Phase: " + phase); + throw new YarnRuntimeException("Unrecognized Phase: " + phase); } public static TaskCompletionEvent[] fromYarn( @@ -202,7 +202,7 @@ public class TypeConverter { case TIPFAILED: return TaskCompletionEvent.Status.TIPFAILED; } - throw new YarnException("Unrecognized status: " + newStatus); + throw new YarnRuntimeException("Unrecognized status: " + newStatus); } public static org.apache.hadoop.mapred.TaskAttemptID fromYarn( @@ -328,7 +328,7 @@ public class TypeConverter { case ERROR: return org.apache.hadoop.mapred.JobStatus.FAILED; } - throw new YarnException("Unrecognized job state: " + state); + throw new YarnRuntimeException("Unrecognized job state: " + state); } public static org.apache.hadoop.mapred.TIPStatus fromYarn( @@ -346,7 +346,7 @@ public class TypeConverter { case FAILED: return org.apache.hadoop.mapred.TIPStatus.FAILED; } - throw new YarnException("Unrecognized task state: " + state); + throw new YarnRuntimeException("Unrecognized task state: " + state); } public static TaskReport fromYarn(org.apache.hadoop.mapreduce.v2.api.records.TaskReport report) { @@ -408,7 +408,7 @@ public class TypeConverter { case KILLED: return State.KILLED; } - throw new YarnException("Unrecognized application state: " + yarnApplicationState); + throw new YarnRuntimeException("Unrecognized application state: " + yarnApplicationState); } private static final String TT_NAME_PREFIX = "tracker_"; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java index 80ae926c566..3b027607118 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java @@ -49,7 +49,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState; import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.yarn.ContainerLogAppender; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.LocalResource; @@ -97,7 +97,7 @@ public class MRApps extends Apps { case MAP: return "m"; case REDUCE: return "r"; } - throw new YarnException("Unknown task type: "+ type.toString()); + throw new YarnRuntimeException("Unknown task type: "+ type.toString()); } public static enum TaskAttemptStateUI { @@ -126,7 +126,7 @@ public class MRApps extends Apps { // JDK 7 supports switch on strings if (symbol.equals("m")) return TaskType.MAP; if (symbol.equals("r")) return TaskType.REDUCE; - throw new YarnException("Unknown task symbol: "+ symbol); + throw new YarnRuntimeException("Unknown task symbol: "+ symbol); } public static TaskAttemptStateUI taskAttemptState(String attemptStateStr) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java index c0868406ece..84b827e6611 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java @@ -56,7 +56,7 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskResponse; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenRequest; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenResponse; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl; import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl; import org.junit.Test; @@ -84,7 +84,7 @@ public class TestRPCFactories { RpcServerFactoryPBImpl.get().getServer( MRClientProtocol.class, instance, addr, conf, null, 1); server.start(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete server"); } finally { @@ -110,12 +110,12 @@ public class TestRPCFactories { MRClientProtocol client = null; try { client = (MRClientProtocol) RpcClientFactoryPBImpl.get().getClient(MRClientProtocol.class, 1, NetUtils.getConnectAddress(server), conf); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete client"); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete server"); } finally { diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java index 08e4b94c8e4..7e6a6725881 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java @@ -20,7 +20,7 @@ package org.apache.hadoop.mapreduce.v2; import junit.framework.Assert; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest; @@ -38,7 +38,7 @@ public class TestRecordFactory { try { CounterGroup response = pbRecordFactory.newRecordInstance(CounterGroup.class); Assert.assertEquals(CounterGroupPBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } @@ -46,7 +46,7 @@ public class TestRecordFactory { try { GetCountersRequest response = pbRecordFactory.newRecordInstance(GetCountersRequest.class); Assert.assertEquals(GetCountersRequestPBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CachedHistoryStorage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CachedHistoryStorage.java index eb4e78499fe..3a0e8f4b49c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CachedHistoryStorage.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CachedHistoryStorage.java @@ -37,7 +37,7 @@ import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo; import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo; import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.service.AbstractService; /** @@ -60,7 +60,7 @@ public class CachedHistoryStorage extends AbstractService implements @SuppressWarnings("serial") @Override - public void init(Configuration conf) throws YarnException { + public void init(Configuration conf) throws YarnRuntimeException { LOG.info("CachedHistoryStorage Init"); loadedJobCacheSize = conf.getInt( @@ -94,7 +94,7 @@ public class CachedHistoryStorage extends AbstractService implements loadedJobCache.put(job.getID(), job); return job; } catch (IOException e) { - throw new YarnException( + throw new YarnRuntimeException( "Could not find/load job: " + fileInfo.getJobId(), e); } } @@ -120,7 +120,7 @@ public class CachedHistoryStorage extends AbstractService implements } return result; } catch 
(IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -137,7 +137,7 @@ public class CachedHistoryStorage extends AbstractService implements } } catch (IOException e) { LOG.warn("Error trying to scan for all FileInfos", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } return result; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java index 2f3c57d6b8c..cf67d85e2bf 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java @@ -60,7 +60,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.util.Records; @@ -333,12 +333,12 @@ public class CompletedJob implements org.apache.hadoop.mapreduce.v2.app.job.Job historyFileAbsolute); this.jobInfo = parser.parse(); } catch (IOException e) { - throw new YarnException("Could not load history file " + throw new YarnRuntimeException("Could not load history file " + historyFileAbsolute, e); } IOException parseException = parser.getParseException(); if (parseException != null) { - throw new YarnException( + throw new YarnRuntimeException( "Could not parse history file " + historyFileAbsolute, parseException); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java index a55e5ad1910..eb21e4a7335 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java @@ -61,7 +61,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.service.AbstractService; import com.google.common.annotations.VisibleForTesting; @@ -477,7 +477,7 @@ public class HistoryFileManager extends AbstractService { mkdir(doneDirFc, doneDirPrefixPath, new FsPermission( JobHistoryUtils.HISTORY_DONE_DIR_PERMISSION)); } catch (IOException e) { - throw new YarnException("Error creating done directory: [" + throw new YarnRuntimeException("Error creating done directory: [" + doneDirPrefixPath + "]", e); } @@ -493,7 +493,7 @@ public class HistoryFileManager extends AbstractService { 
JobHistoryUtils.HISTORY_INTERMEDIATE_DONE_DIR_PERMISSIONS.toShort())); } catch (IOException e) { LOG.info("error creating done directory on dfs " + e); - throw new YarnException("Error creating intermediate done directory: [" + throw new YarnRuntimeException("Error creating intermediate done directory: [" + intermediateDoneDirPath + "]", e); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java index 0f9cf267b55..4ad42ad8d51 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java @@ -40,7 +40,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.ClusterInfo; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.event.EventHandler; @@ -71,7 +71,7 @@ public class JobHistory extends AbstractService implements HistoryContext { private HistoryFileManager hsManager = null; @Override - public void init(Configuration conf) throws YarnException { + public void init(Configuration conf) throws YarnRuntimeException { LOG.info("JobHistory Init"); this.conf = conf; this.appID = ApplicationId.newInstance(0, 0); @@ -87,7 +87,7 @@ public class JobHistory extends AbstractService implements HistoryContext { try { hsManager.initExisting(); } catch (IOException e) { - throw new YarnException("Failed to intialize existing directories", e); + throw new YarnRuntimeException("Failed to intialize existing directories", e); } storage = ReflectionUtils.newInstance(conf.getClass( diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java index f76f030cc69..44eb2178885 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java @@ -32,7 +32,7 @@ import org.apache.hadoop.metrics2.source.JvmMetrics; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.util.ShutdownHookManager; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; @@ -73,7 +73,7 @@ public class JobHistoryServer extends CompositeService { try { doSecureLogin(conf); } catch(IOException ie) { - throw new YarnException("History Server Failed to login", ie); + throw new YarnRuntimeException("History Server 
Failed to login", ie); } jobHistoryService = new JobHistory(); historyContext = (HistoryContext)jobHistoryService; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java index 430c35ce1c9..e84894cafdd 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java @@ -55,7 +55,7 @@ import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo; import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo; import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo; import org.apache.hadoop.mapreduce.v2.util.MRApps; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.webapp.BadRequestException; import org.apache.hadoop.yarn.webapp.NotFoundException; import org.apache.hadoop.yarn.webapp.WebApp; @@ -255,7 +255,7 @@ public class HsWebServices { if (type != null && !type.isEmpty()) { try { ttype = MRApps.taskType(type); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { throw new BadRequestException("tasktype must be either m or r"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java index 8db7e9783a0..46c266ac625 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java @@ -33,7 +33,7 @@ import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.ipc.YarnRPC; public class ClientCache { @@ -60,7 +60,7 @@ public class ClientCache { hsProxy = instantiateHistoryProxy(); } catch (IOException e) { LOG.warn("Could not connect to History server.", e); - throw new YarnException("Could not connect to History server.", e); + throw new YarnRuntimeException("Could not connect to History server.", e); } } ClientServiceDelegate client = cache.get(jobId); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java index 1e5cd301b50..f49370e8fd3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java @@ -65,12 +65,12 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -143,7 +143,7 @@ public class ClientServiceDelegate { ApplicationReport application = null; try { application = rm.getApplicationReport(appId); - } catch (YarnRemoteException e2) { + } catch (YarnException e2) { throw new IOException(e2); } if (application != null) { @@ -212,11 +212,11 @@ public class ClientServiceDelegate { Thread.sleep(2000); } catch (InterruptedException e1) { LOG.warn("getProxy() call interruped", e1); - throw new YarnException(e1); + throw new YarnRuntimeException(e1); } try { application = rm.getApplicationReport(appId); - } catch (YarnRemoteException e1) { + } catch (YarnException e1) { throw new IOException(e1); } if (application == null) { @@ -226,8 +226,8 @@ public class ClientServiceDelegate { } } catch (InterruptedException e) { LOG.warn("getProxy() call interruped", e); - throw new YarnException(e); - } catch (YarnRemoteException e) { + throw new YarnRuntimeException(e); + } catch (YarnException e) { throw new IOException(e); } } @@ -296,9 +296,9 @@ public class ClientServiceDelegate { try { methodOb = MRClientProtocol.class.getMethod(method, argClass); } catch (SecurityException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (NoSuchMethodException e) { - throw new YarnException("Method name mismatch", e); + throw new YarnRuntimeException("Method name mismatch", e); } int maxRetries = this.conf.getInt( MRJobConfig.MR_CLIENT_MAX_RETRIES, @@ -308,7 +308,7 @@ public class ClientServiceDelegate { try { return methodOb.invoke(getProxy(), args); } catch (InvocationTargetException e) { - // Will not throw out YarnRemoteException anymore + // Will not throw out YarnException anymore LOG.debug("Failed to contact AM/History for job " + jobId + " retrying..", e.getTargetException()); // Force reconnection by setting the proxy to null. 
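ClientServiceDelegate above keeps its convention of converting remote failures into IOException for callers of the MapReduce client API: the checked YarnException raised by the ResourceManager proxy is wrapped, while local misuse still surfaces as the unchecked YarnRuntimeException. The following is a minimal sketch of that wrapping pattern, assuming only the exception constructors defined elsewhere in this patch; the RemoteCall interface and callRemote helper are hypothetical stand-ins, not Hadoop types.

import java.io.IOException;

import org.apache.hadoop.yarn.YarnRuntimeException;
import org.apache.hadoop.yarn.exceptions.YarnException;

// Illustrative sketch only: mirrors the wrap-and-rethrow convention used in
// ClientServiceDelegate. RemoteCall and callRemote are hypothetical helpers.
public class ExceptionWrappingSketch {

  // Hypothetical stand-in for an invocation on an RM/AM proxy.
  interface RemoteCall<T> {
    T invoke() throws YarnException, IOException;
  }

  static <T> T callRemote(RemoteCall<T> call) throws IOException {
    if (call == null) {
      // Local misuse is a programming error, so it stays unchecked.
      throw new YarnRuntimeException("callRemote invoked without a proxy");
    }
    try {
      return call.invoke();
    } catch (YarnException e) {
      // Remote-side failure: wrap the checked exception as IOException,
      // matching what the delegate does around rm.getApplicationReport().
      throw new IOException(e);
    }
  }

  public static void main(String[] args) {
    try {
      callRemote(() -> {
        throw new YarnException("simulated ResourceManager failure");
      });
    } catch (IOException e) {
      System.out.println("wrapped as IOException, cause: " + e.getCause());
    }
  }
}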
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java index b893c927a05..dee5d183b8a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java @@ -43,7 +43,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.client.YarnClientImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ProtoUtils; public class ResourceMgrDelegate extends YarnClientImpl { @@ -68,7 +68,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { InterruptedException { try { return TypeConverter.fromYarnNodes(super.getNodeReports()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -76,7 +76,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { public JobStatus[] getAllJobs() throws IOException, InterruptedException { try { return TypeConverter.fromYarnApps(super.getApplicationList(), this.conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -98,7 +98,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { metrics.getNumNodeManagers() * 2, 1, metrics.getNumNodeManagers(), 0, 0); return oldMetrics; - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -113,7 +113,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { try { return ProtoUtils.convertFromProtoFormat( super.getRMDelegationToken(renewer), rmAddress); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -127,7 +127,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { this.application = super.getNewApplication(); this.applicationId = this.application.getApplicationId(); return TypeConverter.fromYarn(applicationId); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -139,7 +139,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { super.getQueueInfo(queueName); return (queueInfo == null) ? 
null : TypeConverter.fromYarn(queueInfo, conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -149,7 +149,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { try { return TypeConverter.fromYarnQueueUserAclsInfo(super .getQueueAclsInfo()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -157,7 +157,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { public QueueInfo[] getQueues() throws IOException, InterruptedException { try { return TypeConverter.fromYarnQueueInfo(super.getAllQueues(), this.conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -166,7 +166,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { try { return TypeConverter.fromYarnQueueInfo(super.getRootQueueInfos(), this.conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -176,7 +176,7 @@ public class ResourceMgrDelegate extends YarnClientImpl { try { return TypeConverter.fromYarnQueueInfo(super.getChildQueueInfos(parent), this.conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java index 4d950cd6e06..4686d9f2136 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java @@ -64,7 +64,7 @@ import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; @@ -79,7 +79,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.security.client.RMTokenSelector; @@ -287,7 +287,7 @@ public class YARNRunner implements ClientProtocol { try { ts.writeTokenStorageFile(applicationTokensFile, conf); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } // Construct necessary information to start the MR AM @@ -311,7 +311,7 @@ public class YARNRunner implements ClientProtocol { diagnostics); } return clientCache.getClient(jobId).getJobStatus(jobId); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -561,7 +561,7 @@ public class YARNRunner implements ClientProtocol { if (status.getState() != 
JobStatus.State.RUNNING) { try { resMgrDelegate.killApplication(TypeConverter.toYarn(arg0).getAppId()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } return; @@ -589,7 +589,7 @@ public class YARNRunner implements ClientProtocol { if (status.getState() != JobStatus.State.KILLED) { try { resMgrDelegate.killApplication(TypeConverter.toYarn(arg0).getAppId()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java index e6a8d2abd24..e95c96c693a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java @@ -68,7 +68,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.JobState; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ClientRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse; @@ -404,7 +404,7 @@ public class TestClientRedirect { address.getAddress(); hostNameResolved = InetAddress.getLocalHost(); } catch (UnknownHostException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } server = diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java index ef4aa258c96..5bd1e1f6b59 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java @@ -56,7 +56,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; import org.junit.Test; import org.junit.runner.RunWith; @@ -215,7 +215,7 @@ public class TestClientServiceDelegate { getRunningApplicationReport(null, 0)).thenReturn( getRunningApplicationReport(null, 0)).thenReturn( getRunningApplicationReport("am2", 90)); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } @@ -285,7 +285,7 @@ public class TestClientServiceDelegate { getRunningApplicationReport("am1", 78)).thenReturn( getRunningApplicationReport("am1", 
78)).thenReturn( getFinishedApplicationReport()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } @@ -366,7 +366,7 @@ public class TestClientServiceDelegate { verify(rmDelegate, times(3)).getApplicationReport( any(ApplicationId.class)); Assert.assertNotNull(jobStatus); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -396,7 +396,7 @@ public class TestClientServiceDelegate { } verify(rmDelegate, times(noOfRetries)).getApplicationReport( any(ApplicationId.class)); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -447,7 +447,7 @@ public class TestClientServiceDelegate { ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class); try { when(rm.getApplicationReport(jobId.getAppId())).thenReturn(null); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } return rm; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java index 5561633a6d2..85dcea9cddb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java @@ -45,7 +45,7 @@ import org.apache.hadoop.mapred.lib.IdentityMapper; import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus; import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.junit.Test; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; @@ -181,12 +181,12 @@ public class TestNetworkedJob { try { client.getSetupTaskReports(jobId); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { assertEquals(e.getMessage(), "Unrecognized task type: JOB_SETUP"); } try { client.getCleanupTaskReports(jobId); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { assertEquals(e.getMessage(), "Unrecognized task type: JOB_CLEANUP"); } assertEquals(client.getReduceTaskReports(jobId).length, 0); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java index c2c17aeb3c4..6b60b36d35c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java @@ -38,7 +38,7 @@ import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import 
org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; import org.junit.Test; import org.mockito.ArgumentCaptor; @@ -60,7 +60,7 @@ public class TestResourceMgrDelegate { try { Mockito.when(applicationsManager.getQueueInfo(Mockito.any( GetQueueInfoRequest.class))).thenReturn(response); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } @@ -78,7 +78,7 @@ public class TestResourceMgrDelegate { try { Mockito.verify(applicationsManager).getQueueInfo( argument.capture()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java index 10d7a71e1b8..e1cf3d40468 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java @@ -35,7 +35,7 @@ import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.util.JarFinder; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.MiniYARNCluster; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; @@ -100,7 +100,7 @@ public class MiniMRYarnCluster extends MiniYARNCluster { Path doneDirPath = fc.makeQualified(new Path(doneDir)); fc.mkdir(doneDirPath, null, true); } catch (IOException e) { - throw new YarnException("Could not create staging directory. ", e); + throw new YarnRuntimeException("Could not create staging directory. ", e); } conf.set(MRConfig.MASTER_ADDRESS, "test"); // The default is local because of // which shuffle doesn't happen @@ -158,7 +158,7 @@ public class MiniMRYarnCluster extends MiniYARNCluster { } super.start(); } catch (Throwable t) { - throw new YarnException(t); + throw new YarnRuntimeException(t); } //need to do this because historyServer.init creates a new Configuration getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS, diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 41acd5786ea..4a4bbe6562c 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -90,6 +90,9 @@ Release 2.1.0-beta - UNRELEASED YARN-748. Moved BuilderUtils from yarn-common to yarn-server-common for eventual retirement. (Jian He via vinodkv) + YARN-635. Renamed YarnRemoteException to YarnException. (Siddharth Seth via + vinodkv) + NEW FEATURES YARN-482. FS: Extend SchedulingMode to intermediate queues. 
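The two renames below split the exception hierarchy: the unchecked org.apache.hadoop.yarn.YarnException becomes YarnRuntimeException, while the checked, RPC-facing org.apache.hadoop.yarn.exceptions.YarnRemoteException takes over the YarnException name. The following is a minimal sketch of how the two types behave after the rename; the RenameSketch class and its messages are illustrative only and not part of the patch, but the constructors used are the ones defined in the diffs that follow.

import org.apache.hadoop.yarn.YarnRuntimeException;
import org.apache.hadoop.yarn.exceptions.YarnException;

// Illustrative sketch only: shows the post-rename split between the checked
// and unchecked exception types introduced by the diffs below.
public class RenameSketch {

  // Remote/protocol failures are checked and must be declared or handled.
  static void remoteStyleFailure() throws YarnException {
    throw new YarnException("checked: reported across an RPC boundary");
  }

  // Local/framework failures stay unchecked, as before the rename.
  static void localStyleFailure() {
    throw new YarnRuntimeException("unchecked: local misconfiguration or bug");
  }

  public static void main(String[] args) {
    try {
      remoteStyleFailure();
    } catch (YarnException e) {
      System.out.println("caught checked " + e.getClass().getSimpleName());
    }
    try {
      localStyleFailure();
    } catch (YarnRuntimeException e) {
      System.out.println("caught unchecked " + e.getClass().getSimpleName());
    }
  }
}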
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnException.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnRuntimeException.java similarity index 75% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnException.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnRuntimeException.java index 29279b6a46c..29fd39f6950 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnException.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnRuntimeException.java @@ -24,10 +24,14 @@ package org.apache.hadoop.yarn; * service, must include a String only constructor for the exception to be * unwrapped on the client. */ -public class YarnException extends RuntimeException { - public YarnException(Throwable cause) { super(cause); } - public YarnException(String message) { super(message); } - public YarnException(String message, Throwable cause) { +public class YarnRuntimeException extends RuntimeException { + /** + * + */ + private static final long serialVersionUID = -7153142425412203936L; + public YarnRuntimeException(Throwable cause) { super(cause); } + public YarnRuntimeException(String message) { super(message); } + public YarnRuntimeException(String message, Throwable cause) { super(message, cause); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/AMRMProtocol.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/AMRMProtocol.java index e6c8c66ae65..dedd9a9c3e4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/AMRMProtocol.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/AMRMProtocol.java @@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterReque import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ResourceRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; /** *
The protocol between a live instance of ApplicationMaster @@ -58,12 +58,12 @@ public interface AMRMProtocol { * * @param request registration request * @return registration respose - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public RegisterApplicationMasterResponse registerApplicationMaster( RegisterApplicationMasterRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by an ApplicationMaster to notify the @@ -78,12 +78,12 @@ public interface AMRMProtocol { * * @param request completion request * @return completion response - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public FinishApplicationMasterResponse finishApplicationMaster( FinishApplicationMasterRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The main interface between an ApplicationMaster @@ -108,9 +108,9 @@ public interface AMRMProtocol { * * @param request allocation request * @return allocation response - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public AllocateResponse allocate(AllocateRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java index 6bef8cb417f..593a8ea1b79 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java @@ -54,7 +54,7 @@ import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; /** *
The protocol between clients and the ResourceManager @@ -79,13 +79,13 @@ public interface ClientRMProtocol { * @param request request to get a new ApplicationId * @return response containing the new ApplicationId to be used * to submit an application - * @throws YarnRemoteException + * @throws YarnException * @throws IOException * @see #submitApplication(SubmitApplicationRequest) */ public GetNewApplicationResponse getNewApplication( GetNewApplicationRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by clients to submit a new application to the @@ -108,13 +108,13 @@ public interface ClientRMProtocol { * * @param request request to submit a new application * @return (empty) response on accepting the submission - * @throws YarnRemoteException + * @throws YarnException * @throws IOException * @see #getNewApplication(GetNewApplicationRequest) */ public SubmitApplicationResponse submitApplication( SubmitApplicationRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by clients to request the @@ -132,13 +132,13 @@ public interface ClientRMProtocol { * @param request request to abort a submited application * @return ResourceManager returns an empty response * on success and throws an exception on rejecting the request - * @throws YarnRemoteException + * @throws YarnException * @throws IOException * @see #getQueueUserAcls(GetQueueUserAclsInfoRequest) */ public KillApplicationResponse forceKillApplication( KillApplicationRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by clients to get a report of an Application from @@ -168,12 +168,12 @@ public interface ClientRMProtocol { * * @param request request for an application report * @return application report - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetApplicationReportResponse getApplicationReport( GetApplicationReportRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by clients to get metrics about the cluster from @@ -186,12 +186,12 @@ public interface ClientRMProtocol { * * @param request request for cluster metrics * @return cluster metrics - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetClusterMetricsResponse getClusterMetrics( GetClusterMetricsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by clients to get a report of all Applications @@ -208,12 +208,12 @@ public interface ClientRMProtocol { * * @param request request for report on all running applications * @return report on all running applications - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetAllApplicationsResponse getAllApplications( GetAllApplicationsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by clients to get a report of all nodes @@ -225,12 +225,12 @@ public interface ClientRMProtocol { * * @param request request for report on all nodes * @return report on all nodes - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetClusterNodesResponse getClusterNodes( GetClusterNodesRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by clients to get information about queues @@ -244,12 +244,12 @@ public interface ClientRMProtocol { * * @param request request to get queue information * @return queue information - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetQueueInfoResponse getQueueInfo( GetQueueInfoRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by clients to get information about queue @@ -261,12 +261,12 @@ public interface ClientRMProtocol { * * @param request request to get queue acls for current user * @return queue acls for current user - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetQueueUserAclsInfoResponse getQueueUserAcls( GetQueueUserAclsInfoRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The interface used by clients to get delegation token, enabling the @@ -277,24 +277,24 @@ public interface ClientRMProtocol { * service. * @param request request to get a delegation token for the client. * @return delegation token that can be used to talk to this service - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetDelegationTokenResponse getDelegationToken( GetDelegationTokenRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** * Renew an existing delegation token. * * @param request the delegation token to be renewed. * @return the new expiry time for the delegation token. - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Private public RenewDelegationTokenResponse renewDelegationToken( - RenewDelegationTokenRequest request) throws YarnRemoteException, + RenewDelegationTokenRequest request) throws YarnException, IOException; /** @@ -302,11 +302,11 @@ public interface ClientRMProtocol { * * @param request the delegation token to be cancelled. * @return an empty response. - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Private public CancelDelegationTokenResponse cancelDelegationToken( - CancelDelegationTokenRequest request) throws YarnRemoteException, + CancelDelegationTokenRequest request) throws YarnException, IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ContainerManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ContainerManager.java index 0961ac4e4d7..eab2e4654a7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ContainerManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ContainerManager.java @@ -32,7 +32,7 @@ import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerStatus; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; /** *
The protocol between an ApplicationMaster and a @@ -69,13 +69,13 @@ public interface ContainerManager { * @param request request to start a container * @return empty response to indicate acceptance of the request * or an exception - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Public @Stable StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The ApplicationMaster requests a NodeManager @@ -96,13 +96,13 @@ public interface ContainerManager { * @param request request to stop a container * @return empty response to indicate acceptance of the request * or an exception - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Public @Stable StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
The api used by the ApplicationMaster to request for @@ -121,12 +121,12 @@ public interface ContainerManager { * with the specified ContainerId * @return response containing the ContainerStatus of the * container - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Public @Stable GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException, + GetContainerStatusRequest request) throws YarnException, IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/RMAdminProtocol.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/RMAdminProtocol.java index 9a2d04c23aa..7fba4de707f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/RMAdminProtocol.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/RMAdminProtocol.java @@ -21,7 +21,7 @@ package org.apache.hadoop.yarn.api; import java.io.IOException; import org.apache.hadoop.tools.GetUserMappingsProtocol; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.api.protocolrecords.RefreshAdminAclsRequest; import org.apache.hadoop.yarn.api.protocolrecords.RefreshAdminAclsResponse; import org.apache.hadoop.yarn.api.protocolrecords.RefreshNodesRequest; @@ -37,25 +37,25 @@ import org.apache.hadoop.yarn.api.protocolrecords.RefreshUserToGroupsMappingsRes public interface RMAdminProtocol extends GetUserMappingsProtocol { public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshNodesResponse refreshNodes(RefreshNodesRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfiguration( RefreshSuperUserGroupsConfigurationRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings( RefreshUserToGroupsMappingsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshAdminAclsResponse refreshAdminAcls( RefreshAdminAclsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshServiceAclsResponse refreshServiceAcls( RefreshServiceAclsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnRemoteException.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnException.java similarity index 80% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnRemoteException.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnException.java index d03f8dd6b46..54b67975168 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnRemoteException.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnException.java @@ -18,23 +18,23 @@ package 
org.apache.hadoop.yarn.exceptions; -public class YarnRemoteException extends Exception { +public class YarnException extends Exception { private static final long serialVersionUID = 1L; - public YarnRemoteException() { + public YarnException() { super(); } - public YarnRemoteException(String message) { + public YarnException(String message) { super(message); } - public YarnRemoteException(Throwable cause) { + public YarnException(Throwable cause) { super(cause); } - public YarnRemoteException(String message, Throwable cause) { + public YarnException(String message, Throwable cause) { super(message, cause); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java index f016675e1b0..a80da183c1a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java @@ -22,7 +22,7 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factories.RecordFactory; @@ -55,13 +55,13 @@ public class RecordFactoryProvider { method.setAccessible(true); return method.invoke(null, null); } catch (ClassNotFoundException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (NoSuchMethodException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java index 88dcffd5c99..f645837c892 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java @@ -72,7 +72,7 @@ import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.client.AMRMClientAsync; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.util.ConverterUtils; @@ -434,10 +434,10 @@ public class ApplicationMaster { /** * Main run function for the application 
master * - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - public boolean run() throws YarnRemoteException, IOException { + public boolean run() throws YarnException, IOException { LOG.info("Starting ApplicationMaster"); AMRMClientAsync.CallbackHandler allocListener = new RMCallbackHandler(); @@ -537,7 +537,7 @@ public class ApplicationMaster { } try { resourceManager.unregisterApplicationMaster(appStatus, appMessage, null); - } catch (YarnRemoteException ex) { + } catch (YarnException ex) { LOG.error("Failed to unregister application", ex); } catch (IOException e) { LOG.error("Failed to unregister application", e); @@ -777,7 +777,7 @@ public class ApplicationMaster { startReq.setContainerToken(container.getContainerToken()); try { cm.startContainer(startReq); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.info("Start container failed for :" + ", containerId=" + container.getId()); e.printStackTrace(); @@ -802,7 +802,7 @@ public class ApplicationMaster { // LOG.info("Container Status" // + ", id=" + container.getId() // + ", status=" +statusResp.getStatus()); - // } catch (YarnRemoteException e) { + // } catch (YarnException e) { // e.printStackTrace(); // } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java index 8cbf2de05e3..4c1a23e9ebc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java @@ -62,7 +62,7 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.client.YarnClientImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.Records; @@ -312,9 +312,9 @@ public class Client extends YarnClientImpl { * Main run function for the client * @return true if application completed successfully * @throws IOException - * @throws YarnRemoteException + * @throws YarnException */ - public boolean run() throws IOException, YarnRemoteException { + public boolean run() throws IOException, YarnException { LOG.info("Running Client"); start(); @@ -591,11 +591,11 @@ public class Client extends YarnClientImpl { * Kill application if time expires. * @param appId Application Id of application to be monitored * @return true if application completed successfully - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ private boolean monitorApplication(ApplicationId appId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { while (true) { @@ -656,11 +656,11 @@ public class Client extends YarnClientImpl { /** * Kill a submitted application by sending a call to the ASM * @param appId Application Id to be killed. 
- * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ private void forceKillApplication(ApplicationId appId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // TODO clarify whether multiple jobs with the same app id can be submitted and be running at // the same time. // If yes, can we kill a particular attempt only? diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java index a5de27a51f1..a01370a35b7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java @@ -50,7 +50,7 @@ import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.YarnClientImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; /** @@ -268,7 +268,7 @@ public class UnmanagedAMLauncher { amProc.destroy(); } - public boolean run() throws IOException, YarnRemoteException { + public boolean run() throws IOException, YarnException { LOG.info("Starting Client"); // Connect to ResourceManager @@ -353,11 +353,11 @@ public class UnmanagedAMLauncher { * @param appId * Application Id of application to be monitored * @return true if application completed successfully - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ private ApplicationReport monitorApplication(ApplicationId appId, - Set finalState) throws YarnRemoteException, + Set finalState) throws YarnException, IOException { long foundAMCompletedTime = 0; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClient.java index e56d5c3fb8e..5b4ba5c5e8b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClient.java @@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.service.Service; import com.google.common.collect.ImmutableList; @@ -113,14 +113,14 @@ public interface AMRMClient extends Servi * @param appHostPort Port master is listening on * @param appTrackingUrl URL at which the master info can be seen * @return RegisterApplicationMasterResponse - 
* @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public RegisterApplicationMasterResponse registerApplicationMaster(String appHostName, int appHostPort, String appTrackingUrl) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** * Request additional containers and receive new container allocations. @@ -134,24 +134,24 @@ public interface AMRMClient extends Servi * App should not make concurrent allocate requests. May cause request loss. * @param progressIndicator Indicates progress made by the master * @return the response of the allocate request - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public AllocateResponse allocate(float progressIndicator) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** * Unregister the application master. This must be called in the end. * @param appStatus Success/Failure status of the master * @param appMessage Diagnostics message on failure * @param appTrackingUrl New URL to get master info - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public void unregisterApplicationMaster(FinalApplicationStatus appStatus, String appMessage, String appTrackingUrl) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** * Request containers for resources before calling allocate diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientAsync.java index 02520d91ff1..c24e3ba81a5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientAsync.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientAsync.java @@ -31,7 +31,7 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; @@ -43,7 +43,7 @@ import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.service.AbstractService; import com.google.common.annotations.VisibleForTesting; @@ -152,7 +152,7 @@ public class AMRMClientAsync extends AbstractService @Override public void stop() { if (Thread.currentThread() == handlerThread) { - throw new YarnException("Cannot call stop from callback handler thread!"); + throw new YarnRuntimeException("Cannot call stop from callback handler thread!"); } keepRunning = false; try { @@ -184,12 +184,12 @@ public class AMRMClientAsync extends AbstractService /** * Registers this application master with the resource manager. On successful * registration, starts the heartbeating thread. 
- * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public RegisterApplicationMasterResponse registerApplicationMaster( String appHostName, int appHostPort, String appTrackingUrl) - throws YarnRemoteException, IOException { + throws YarnException, IOException { RegisterApplicationMasterResponse response = client .registerApplicationMaster(appHostName, appHostPort, appTrackingUrl); heartbeatThread.start(); @@ -201,11 +201,11 @@ public class AMRMClientAsync extends AbstractService * @param appStatus Success/Failure status of the master * @param appMessage Diagnostics message on failure * @param appTrackingUrl New URL to get master info - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public void unregisterApplicationMaster(FinalApplicationStatus appStatus, - String appMessage, String appTrackingUrl) throws YarnRemoteException, + String appMessage, String appTrackingUrl) throws YarnException, IOException { synchronized (unregisterHeartbeatLock) { keepRunning = false; @@ -277,7 +277,7 @@ public class AMRMClientAsync extends AbstractService try { response = client.allocate(progress); - } catch (YarnRemoteException ex) { + } catch (YarnException ex) { LOG.error("Yarn exception on heartbeat", ex); savedException = ex; // interrupt handler thread in case it waiting on the queue diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientImpl.java index 3b09c4a8038..eee0191b838 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientImpl.java @@ -40,7 +40,7 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; @@ -55,7 +55,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -168,7 +168,7 @@ public class AMRMClientImpl try { currentUser = UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } // CurrentUser should already have AMToken loaded. @@ -194,7 +194,7 @@ public class AMRMClientImpl @Override public RegisterApplicationMasterResponse registerApplicationMaster( String appHostName, int appHostPort, String appTrackingUrl) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // do this only once ??? 
RegisterApplicationMasterRequest request = recordFactory .newRecordInstance(RegisterApplicationMasterRequest.class); @@ -213,7 +213,7 @@ public class AMRMClientImpl @Override public AllocateResponse allocate(float progressIndicator) - throws YarnRemoteException, IOException { + throws YarnException, IOException { AllocateResponse allocateResponse = null; ArrayList askList = null; ArrayList releaseList = null; @@ -267,7 +267,7 @@ public class AMRMClientImpl @Override public void unregisterApplicationMaster(FinalApplicationStatus appStatus, - String appMessage, String appTrackingUrl) throws YarnRemoteException, + String appMessage, String appTrackingUrl) throws YarnException, IOException { FinishApplicationMasterRequest request = recordFactory .newRecordInstance(FinishApplicationMasterRequest.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClient.java index d5e94d59585..0e45aa65b9f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClient.java @@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Token; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.service.Service; @InterfaceAudience.Public @@ -51,12 +51,12 @@ public interface NMClient extends Service { * NodeManager to launch the * container * @return a map between the auxiliary service names and their outputs - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ Map startContainer(Container container, ContainerLaunchContext containerLaunchContext) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

Stop a started container.
@@ -65,11 +65,11 @@ public interface NMClient extends Service { * @param nodeId the Id of the NodeManager * @param containerToken the security token to verify authenticity of the * started container - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ void stopContainer(ContainerId containerId, NodeId nodeId, - Token containerToken) throws YarnRemoteException, IOException; + Token containerToken) throws YarnException, IOException; /** *

Query the status of a container.
@@ -79,11 +79,11 @@ public interface NMClient extends Service { * @param containerToken the security token to verify authenticity of the * started container * @return the status of a container - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ ContainerStatus getContainerStatus(ContainerId containerId, NodeId nodeId, - Token containerToken) throws YarnRemoteException, IOException; + Token containerToken) throws YarnException, IOException; /** *

Set whether the containers that are started by this client, and are diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientAsync.java index af8f0464dc2..372ce22e15d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientAsync.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientAsync.java @@ -51,7 +51,7 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AbstractEvent; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.state.InvalidStateTransitonException; @@ -446,7 +446,7 @@ public class NMClientAsync extends AbstractService { + "Container " + containerId, thr); } return ContainerState.RUNNING; - } catch (YarnRemoteException e) { + } catch (YarnException e) { return onExceptionRaised(container, event, e); } catch (IOException e) { return onExceptionRaised(container, event, e); @@ -490,7 +490,7 @@ public class NMClientAsync extends AbstractService { + "Container " + event.getContainerId(), thr); } return ContainerState.DONE; - } catch (YarnRemoteException e) { + } catch (YarnException e) { return onExceptionRaised(container, event, e); } catch (IOException e) { return onExceptionRaised(container, event, e); @@ -602,7 +602,7 @@ public class NMClientAsync extends AbstractService { "Unchecked exception is thrown from onContainerStatusReceived" + " for Container " + event.getContainerId(), thr); } - } catch (YarnRemoteException e) { + } catch (YarnException e) { onExceptionRaised(containerId, e); } catch (IOException e) { onExceptionRaised(containerId, e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientImpl.java index 1a564f4a487..5c4b810aaa1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientImpl.java @@ -44,7 +44,7 @@ import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; @@ -112,7 +112,7 @@ public class NMClientImpl extends AbstractService implements NMClient { stopContainer(startedContainer.getContainerId(), startedContainer.getNodeId(), startedContainer.getContainerToken()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.error("Failed to stop Container " + startedContainer.getContainerId() + "when stopping NMClientImpl"); @@ -213,7 +213,7 @@ public 
class NMClientImpl extends AbstractService implements NMClient { public synchronized Map startContainer( Container container, ContainerLaunchContext containerLaunchContext) - throws YarnRemoteException, IOException { + throws YarnException, IOException { if (!container.getId().equals(containerId)) { throw new IllegalArgumentException( "NMCommunicator's containerId mismatches the given Container's"); @@ -228,7 +228,7 @@ public class NMClientImpl extends AbstractService implements NMClient { if (LOG.isDebugEnabled()) { LOG.debug("Started Container " + containerId); } - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.warn("Container " + containerId + " failed to start", e); throw e; } catch (IOException e) { @@ -238,7 +238,7 @@ public class NMClientImpl extends AbstractService implements NMClient { return startResponse.getAllServiceResponse(); } - public synchronized void stopContainer() throws YarnRemoteException, + public synchronized void stopContainer() throws YarnException, IOException { try { StopContainerRequest stopRequest = @@ -248,7 +248,7 @@ public class NMClientImpl extends AbstractService implements NMClient { if (LOG.isDebugEnabled()) { LOG.debug("Stopped Container " + containerId); } - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.warn("Container " + containerId + " failed to stop", e); throw e; } catch (IOException e) { @@ -258,7 +258,7 @@ public class NMClientImpl extends AbstractService implements NMClient { } public synchronized ContainerStatus getContainerStatus() - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetContainerStatusResponse statusResponse = null; try { GetContainerStatusRequest statusRequest = @@ -268,7 +268,7 @@ public class NMClientImpl extends AbstractService implements NMClient { if (LOG.isDebugEnabled()) { LOG.debug("Got the status of Container " + containerId); } - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.warn( "Unable to get the status of Container " + containerId, e); throw e; @@ -284,7 +284,7 @@ public class NMClientImpl extends AbstractService implements NMClient { @Override public Map startContainer( Container container, ContainerLaunchContext containerLaunchContext) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // Do synchronization on StartedContainer to prevent race condition // between startContainer and stopContainer synchronized (addStartedContainer(container)) { @@ -297,7 +297,7 @@ public class NMClientImpl extends AbstractService implements NMClient { nmCommunicator.start(); allServiceResponse = nmCommunicator.startContainer(container, containerLaunchContext); - } catch (YarnRemoteException e) { + } catch (YarnException e) { // Remove the started container if it failed to start removeStartedContainer(container.getId()); throw e; @@ -326,7 +326,7 @@ public class NMClientImpl extends AbstractService implements NMClient { @Override public void stopContainer(ContainerId containerId, NodeId nodeId, - Token containerToken) throws YarnRemoteException, IOException { + Token containerToken) throws YarnException, IOException { StartedContainer startedContainer = getStartedContainer(containerId); if (startedContainer == null) { throw RPCUtil.getRemoteException("Container " + containerId + @@ -359,7 +359,7 @@ public class NMClientImpl extends AbstractService implements NMClient { @Override public ContainerStatus getContainerStatus(ContainerId containerId, NodeId nodeId, Token containerToken) - throws 
YarnRemoteException, IOException { + throws YarnException, IOException { NMCommunicator nmCommunicator = null; try { nmCommunicator = new NMCommunicator(containerId, nodeId, containerToken); @@ -375,7 +375,7 @@ public class NMClientImpl extends AbstractService implements NMClient { } protected synchronized StartedContainer addStartedContainer( - Container container) throws YarnRemoteException, IOException { + Container container) throws YarnException, IOException { if (startedContainers.containsKey(container.getId())) { throw RPCUtil.getRemoteException("Container " + container.getId() + " is already started"); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/RMAdmin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/RMAdmin.java index b9be15944f7..4ed99011174 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/RMAdmin.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/RMAdmin.java @@ -37,7 +37,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.RefreshServiceAclsRequest; import org.apache.hadoop.yarn.api.protocolrecords.RefreshSuperUserGroupsConfigurationRequest; import org.apache.hadoop.yarn.api.protocolrecords.RefreshUserToGroupsMappingsRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -188,7 +188,7 @@ public class RMAdmin extends Configured implements Tool { return adminProtocol; } - private int refreshQueues() throws IOException, YarnRemoteException { + private int refreshQueues() throws IOException, YarnException { // Refresh the queue properties RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshQueuesRequest request = @@ -197,7 +197,7 @@ public class RMAdmin extends Configured implements Tool { return 0; } - private int refreshNodes() throws IOException, YarnRemoteException { + private int refreshNodes() throws IOException, YarnException { // Refresh the nodes RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshNodesRequest request = @@ -207,7 +207,7 @@ public class RMAdmin extends Configured implements Tool { } private int refreshUserToGroupsMappings() throws IOException, - YarnRemoteException { + YarnException { // Refresh the user-to-groups mappings RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshUserToGroupsMappingsRequest request = @@ -217,7 +217,7 @@ public class RMAdmin extends Configured implements Tool { } private int refreshSuperUserGroupsConfiguration() throws IOException, - YarnRemoteException { + YarnException { // Refresh the super-user groups RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshSuperUserGroupsConfigurationRequest request = @@ -226,7 +226,7 @@ public class RMAdmin extends Configured implements Tool { return 0; } - private int refreshAdminAcls() throws IOException, YarnRemoteException { + private int refreshAdminAcls() throws IOException, YarnException { // Refresh the admin acls RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshAdminAclsRequest request = @@ -235,7 +235,7 @@ public class RMAdmin extends Configured implements Tool { return 0; } - private int refreshServiceAcls() 
throws IOException, YarnRemoteException { + private int refreshServiceAcls() throws IOException, YarnException { // Refresh the service acls RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshServiceAclsRequest request = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java index b7cf5a07171..53303b1e540 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java @@ -33,7 +33,7 @@ import org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.service.Service; @InterfaceAudience.Public @@ -58,10 +58,10 @@ public interface YarnClient extends Service { * * @return response containing the new ApplicationId to be used * to submit an application - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - GetNewApplicationResponse getNewApplication() throws YarnRemoteException, + GetNewApplicationResponse getNewApplication() throws YarnException, IOException; /** @@ -75,12 +75,12 @@ public interface YarnClient extends Service { * {@link ApplicationSubmissionContext} containing all the details * needed to submit a new application * @return {@link ApplicationId} of the accepted application - * @throws YarnRemoteException + * @throws YarnException * @throws IOException * @see #getNewApplication() */ ApplicationId submitApplication(ApplicationSubmissionContext appContext) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

@@ -89,13 +89,13 @@ public interface YarnClient extends Service { * * @param applicationId * {@link ApplicationId} of the application that needs to be killed - * @throws YarnRemoteException + * @throws YarnException * in case of errors or if YARN rejects the request due to * access-control restrictions. * @throws IOException * @see #getQueueAclsInfo() */ - void killApplication(ApplicationId applicationId) throws YarnRemoteException, + void killApplication(ApplicationId applicationId) throws YarnException, IOException; /** @@ -125,11 +125,11 @@ public interface YarnClient extends Service { * @param appId * {@link ApplicationId} of the application that needs a report * @return application report - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ ApplicationReport getApplicationReport(ApplicationId appId) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

@@ -143,10 +143,10 @@ public interface YarnClient extends Service { *

* * @return a list of reports of all running applications - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getApplicationList() throws YarnRemoteException, + List getApplicationList() throws YarnException, IOException; /** @@ -155,10 +155,10 @@ public interface YarnClient extends Service { *

* * @return cluster metrics - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - YarnClusterMetrics getYarnClusterMetrics() throws YarnRemoteException, + YarnClusterMetrics getYarnClusterMetrics() throws YarnException, IOException; /** @@ -167,10 +167,10 @@ public interface YarnClient extends Service { *

* * @return A list of report of all nodes - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getNodeReports() throws YarnRemoteException, IOException; + List getNodeReports() throws YarnException, IOException; /** *

@@ -181,11 +181,11 @@ public interface YarnClient extends Service { * securely talking to YARN. * @return a delegation token ({@link Token}) that can be used to * talk to YARN - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ Token getRMDelegationToken(Text renewer) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

@@ -195,12 +195,12 @@ public interface YarnClient extends Service { * @param queueName * Name of the queue whose information is needed * @return queue information - * @throws YarnRemoteException + * @throws YarnException * in case of errors or if YARN rejects the request due to * access-control restrictions. * @throws IOException */ - QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, + QueueInfo getQueueInfo(String queueName) throws YarnException, IOException; /** @@ -210,10 +210,10 @@ public interface YarnClient extends Service { *

* * @return a list of queue-information for all queues - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getAllQueues() throws YarnRemoteException, IOException; + List getAllQueues() throws YarnException, IOException; /** *

@@ -221,10 +221,10 @@ public interface YarnClient extends Service { *

* * @return a list of queue-information for all the top-level queues - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getRootQueueInfos() throws YarnRemoteException, IOException; + List getRootQueueInfos() throws YarnException, IOException; /** *

@@ -236,10 +236,10 @@ public interface YarnClient extends Service { * Name of the queue whose child-queues' information is needed * @return a list of queue-information for all queues who are direct children * of the given parent queue. - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getChildQueueInfos(String parent) throws YarnRemoteException, + List getChildQueueInfos(String parent) throws YarnException, IOException; /** @@ -250,9 +250,9 @@ public interface YarnClient extends Service { * * @return a list of queue acls ({@link QueueUserACLInfo}) for * current user - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getQueueAclsInfo() throws YarnRemoteException, + List getQueueAclsInfo() throws YarnException, IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java index 4eb88850c7b..aea180c0b1a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java @@ -57,7 +57,7 @@ import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.util.Records; @@ -121,7 +121,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { @Override public GetNewApplicationResponse getNewApplication() - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetNewApplicationRequest request = Records.newRecord(GetNewApplicationRequest.class); return rmClient.getNewApplication(request); @@ -130,7 +130,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { @Override public ApplicationId submitApplication(ApplicationSubmissionContext appContext) - throws YarnRemoteException, IOException { + throws YarnException, IOException { ApplicationId applicationId = appContext.getApplicationId(); appContext.setApplicationId(applicationId); SubmitApplicationRequest request = @@ -167,7 +167,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { @Override public void killApplication(ApplicationId applicationId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { LOG.info("Killing application " + applicationId); KillApplicationRequest request = Records.newRecord(KillApplicationRequest.class); @@ -177,7 +177,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { @Override public ApplicationReport getApplicationReport(ApplicationId appId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetApplicationReportRequest request = Records.newRecord(GetApplicationReportRequest.class); request.setApplicationId(appId); @@ -188,7 +188,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { @Override public List getApplicationList() - throws YarnRemoteException, 
IOException { + throws YarnException, IOException { GetAllApplicationsRequest request = Records.newRecord(GetAllApplicationsRequest.class); GetAllApplicationsResponse response = rmClient.getAllApplications(request); @@ -196,7 +196,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { } @Override - public YarnClusterMetrics getYarnClusterMetrics() throws YarnRemoteException, + public YarnClusterMetrics getYarnClusterMetrics() throws YarnException, IOException { GetClusterMetricsRequest request = Records.newRecord(GetClusterMetricsRequest.class); @@ -205,7 +205,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { } @Override - public List getNodeReports() throws YarnRemoteException, + public List getNodeReports() throws YarnException, IOException { GetClusterNodesRequest request = Records.newRecord(GetClusterNodesRequest.class); @@ -215,7 +215,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { @Override public Token getRMDelegationToken(Text renewer) - throws YarnRemoteException, IOException { + throws YarnException, IOException { /* get the token from RM */ GetDelegationTokenRequest rmDTRequest = Records.newRecord(GetDelegationTokenRequest.class); @@ -238,7 +238,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { } @Override - public QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, + public QueueInfo getQueueInfo(String queueName) throws YarnException, IOException { GetQueueInfoRequest request = getQueueInfoRequest(queueName, true, false, false); @@ -247,7 +247,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { } @Override - public List getQueueAclsInfo() throws YarnRemoteException, + public List getQueueAclsInfo() throws YarnException, IOException { GetQueueUserAclsInfoRequest request = Records.newRecord(GetQueueUserAclsInfoRequest.class); @@ -255,7 +255,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { } @Override - public List getAllQueues() throws YarnRemoteException, + public List getAllQueues() throws YarnException, IOException { List queues = new ArrayList(); @@ -267,7 +267,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { } @Override - public List getRootQueueInfos() throws YarnRemoteException, + public List getRootQueueInfos() throws YarnException, IOException { List queues = new ArrayList(); @@ -280,7 +280,7 @@ public class YarnClientImpl extends AbstractService implements YarnClient { @Override public List getChildQueueInfos(String parent) - throws YarnRemoteException, IOException { + throws YarnException, IOException { List queues = new ArrayList(); QueueInfo parentQueue = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java index 582b5ad7a50..6bcd804f8a3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java @@ -30,7 +30,7 @@ import org.apache.commons.cli.Options; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; -import 
org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; public class ApplicationCLI extends YarnCLI { @@ -90,10 +90,10 @@ public class ApplicationCLI extends YarnCLI { /** * Lists all the applications present in the Resource Manager * - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - private void listAllApplications() throws YarnRemoteException, IOException { + private void listAllApplications() throws YarnException, IOException { PrintWriter writer = new PrintWriter(sysout); List appsReport = client.getApplicationList(); @@ -117,11 +117,11 @@ public class ApplicationCLI extends YarnCLI { * Kills the application with the application id as appId * * @param applicationId - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ private void killApplication(String applicationId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { ApplicationId appId = ConverterUtils.toApplicationId(applicationId); sysout.println("Killing application " + applicationId); client.killApplication(appId); @@ -131,10 +131,10 @@ public class ApplicationCLI extends YarnCLI { * Prints the application report for an application id. * * @param applicationId - * @throws YarnRemoteException + * @throws YarnException */ private void printApplicationReport(String applicationId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { ApplicationReport appReport = client.getApplicationReport(ConverterUtils .toApplicationId(applicationId)); // Use PrintWriter.println, which uses correct platform line ending. diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java index 83033ae3fd4..b701d214b67 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java @@ -31,7 +31,7 @@ import org.apache.commons.lang.time.DateFormatUtils; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeReport; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; public class NodeCLI extends YarnCLI { @@ -83,10 +83,10 @@ public class NodeCLI extends YarnCLI { /** * Lists all the nodes present in the cluster * - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - private void listClusterNodes() throws YarnRemoteException, IOException { + private void listClusterNodes() throws YarnException, IOException { PrintWriter writer = new PrintWriter(sysout); List nodesReport = client.getNodeReports(); writer.println("Total Nodes:" + nodesReport.size()); @@ -105,9 +105,9 @@ public class NodeCLI extends YarnCLI { * Prints the node report for node id. 
* * @param nodeIdStr - * @throws YarnRemoteException + * @throws YarnException */ - private void printNodeStatus(String nodeIdStr) throws YarnRemoteException, + private void printNodeStatus(String nodeIdStr) throws YarnException, IOException { NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); List nodesReport = client.getNodeReports(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClient.java index f3bc1a3678e..6f3bbe1a496 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClient.java @@ -53,7 +53,7 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.client.AMRMClient.StoredContainerRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.MiniYARNCluster; import org.apache.hadoop.yarn.service.Service.STATE; import org.apache.hadoop.yarn.util.Records; @@ -162,7 +162,7 @@ public class TestAMRMClient { } @Test (timeout=60000) - public void testAMRMClientMatchingFit() throws YarnRemoteException, IOException { + public void testAMRMClientMatchingFit() throws YarnException, IOException { AMRMClientImpl amClient = null; try { // start am rm client @@ -263,7 +263,7 @@ public class TestAMRMClient { } @Test (timeout=60000) - public void testAMRMClientMatchStorage() throws YarnRemoteException, IOException { + public void testAMRMClientMatchStorage() throws YarnException, IOException { AMRMClientImpl amClient = null; try { // start am rm client @@ -384,7 +384,7 @@ public class TestAMRMClient { } @Test (timeout=60000) - public void testAMRMClient() throws YarnRemoteException, IOException { + public void testAMRMClient() throws YarnException, IOException { AMRMClientImpl amClient = null; try { // start am rm client @@ -407,7 +407,7 @@ public class TestAMRMClient { } private void testAllocation(final AMRMClientImpl amClient) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // setup container request assertTrue(amClient.ask.size() == 0); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java index e0f9a4ec51a..451c0b49fa3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java @@ -45,7 +45,7 @@ import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import 
org.mockito.stubbing.Answer; @@ -153,7 +153,7 @@ public class TestAMRMClientAsync { @SuppressWarnings("unchecked") AMRMClient client = mock(AMRMClientImpl.class); String exStr = "TestException"; - YarnRemoteException mockException = mock(YarnRemoteException.class); + YarnException mockException = mock(YarnException.class); when(mockException.getMessage()).thenReturn(exStr); when(client.allocate(anyFloat())).thenThrow(mockException); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClient.java index 8e1c3926f51..6f46ded5726 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClient.java @@ -52,7 +52,7 @@ import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.MiniYARNCluster; import org.apache.hadoop.yarn.service.Service.STATE; import org.apache.hadoop.yarn.util.Records; @@ -71,7 +71,7 @@ public class TestNMClient { int nodeCount = 3; @Before - public void setup() throws YarnRemoteException, IOException { + public void setup() throws YarnException, IOException { // start minicluster conf = new YarnConfiguration(); yarnCluster = @@ -175,7 +175,7 @@ public class TestNMClient { @Test (timeout = 60000) public void testNMClient() - throws YarnRemoteException, IOException { + throws YarnException, IOException { rmClient.registerApplicationMaster("Host", 10000, ""); @@ -187,7 +187,7 @@ public class TestNMClient { private Set allocateContainers( AMRMClientImpl rmClient, int num) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // setup container request Resource capability = Resource.newInstance(1024, 0); Priority priority = Priority.newInstance(0); @@ -228,7 +228,7 @@ public class TestNMClient { } private void testContainerManagement(NMClientImpl nmClient, - Set containers) throws YarnRemoteException, IOException { + Set containers) throws YarnException, IOException { int size = containers.size(); int i = 0; for (Container container : containers) { @@ -238,7 +238,7 @@ public class TestNMClient { nmClient.getContainerStatus(container.getId(), container.getNodeId(), container.getContainerToken()); fail("Exception is expected"); - } catch (YarnRemoteException e) { + } catch (YarnException e) { assertTrue("The thrown exception is not expected", e.getMessage().contains("is not handled by this NodeManager")); } @@ -249,7 +249,7 @@ public class TestNMClient { nmClient.stopContainer(container.getId(), container.getNodeId(), container.getContainerToken()); fail("Exception is expected"); - } catch (YarnRemoteException e) { + } catch (YarnException e) { assertTrue("The thrown exception is not expected", e.getMessage().contains( "is either not started yet or already stopped")); @@ -265,7 +265,7 @@ public class TestNMClient { clc.setTokens(securityTokens); try { nmClient.startContainer(container, clc); - } catch (YarnRemoteException e) { + } catch (YarnException e) { fail("Exception is 
not expected"); } @@ -278,7 +278,7 @@ public class TestNMClient { try { nmClient.stopContainer(container.getId(), container.getNodeId(), container.getContainerToken()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { fail("Exception is not expected"); } @@ -299,7 +299,7 @@ public class TestNMClient { private void testGetContainerStatus(Container container, int index, ContainerState state, String diagnostics, int exitStatus) - throws YarnRemoteException, IOException { + throws YarnException, IOException { while (true) { try { ContainerStatus status = nmClient.getContainerStatus( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClientAsync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClientAsync.java index e831e9823b8..cec7a819b6a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClientAsync.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClientAsync.java @@ -48,7 +48,7 @@ import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -136,7 +136,7 @@ public class TestNMClientAsync { Collections.synchronizedSet(new HashSet()); protected MockNMClientAsync1(int expectedSuccess, int expectedFailure) - throws YarnRemoteException, IOException { + throws YarnException, IOException { super(MockNMClientAsync1.class.getName(), mockNMClient(0), new TestCallbackHandler1(expectedSuccess, expectedFailure)); } @@ -361,7 +361,7 @@ public class TestNMClientAsync { } private NMClient mockNMClient(int mode) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NMClient client = mock(NMClient.class); switch (mode) { case 0: @@ -436,7 +436,7 @@ public class TestNMClientAsync { private CyclicBarrier barrierB; protected MockNMClientAsync2(CyclicBarrier barrierA, CyclicBarrier barrierB, - CyclicBarrier barrierC) throws YarnRemoteException, IOException { + CyclicBarrier barrierC) throws YarnException, IOException { super(MockNMClientAsync2.class.getName(), mockNMClient(0), new TestCallbackHandler2(barrierC)); this.barrierA = barrierA; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java index 2700039648a..fbc876aa45b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java @@ -39,7 +39,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import 
org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; @@ -95,7 +95,7 @@ public class TestYarnClient { ((MockYarnClient) client).setYarnApplicationState(exitStates[i]); try { client.submitApplication(context); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.fail("Exception is not expected."); } catch (IOException e) { Assert.fail("Exception is not expected."); @@ -153,7 +153,7 @@ public class TestYarnClient { try{ when(rmClient.getApplicationReport(any( GetApplicationReportRequest.class))).thenReturn(mockResponse); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.fail("Exception is not expected."); } catch (IOException e) { Assert.fail("Exception is not expected."); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/AMRMProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/AMRMProtocolPBClientImpl.java index 5ab35d32c94..064fe82d177 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/AMRMProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/AMRMProtocolPBClientImpl.java @@ -39,7 +39,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMaste import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMasterResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto; @@ -68,7 +68,7 @@ public class AMRMProtocolPBClientImpl implements AMRMProtocol, Closeable { @Override public AllocateResponse allocate(AllocateRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { AllocateRequestProto requestProto = ((AllocateRequestPBImpl) request).getProto(); try { @@ -81,7 +81,7 @@ public class AMRMProtocolPBClientImpl implements AMRMProtocol, Closeable { @Override public FinishApplicationMasterResponse finishApplicationMaster( - FinishApplicationMasterRequest request) throws YarnRemoteException, + FinishApplicationMasterRequest request) throws YarnException, IOException { FinishApplicationMasterRequestProto requestProto = ((FinishApplicationMasterRequestPBImpl) request).getProto(); @@ -96,7 +96,7 @@ public class AMRMProtocolPBClientImpl implements AMRMProtocol, Closeable { @Override public RegisterApplicationMasterResponse registerApplicationMaster( - RegisterApplicationMasterRequest request) throws YarnRemoteException, + RegisterApplicationMasterRequest request) throws YarnException, IOException { RegisterApplicationMasterRequestProto requestProto = ((RegisterApplicationMasterRequestPBImpl) request).getProto(); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java index 096bedf7438..83943242fcd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java @@ -78,7 +78,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenRe import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto; @@ -113,7 +113,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public KillApplicationResponse forceKillApplication( - KillApplicationRequest request) throws YarnRemoteException, IOException { + KillApplicationRequest request) throws YarnException, IOException { KillApplicationRequestProto requestProto = ((KillApplicationRequestPBImpl) request).getProto(); try { @@ -127,7 +127,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public GetApplicationReportResponse getApplicationReport( - GetApplicationReportRequest request) throws YarnRemoteException, + GetApplicationReportRequest request) throws YarnException, IOException { GetApplicationReportRequestProto requestProto = ((GetApplicationReportRequestPBImpl) request).getProto(); @@ -142,7 +142,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public GetClusterMetricsResponse getClusterMetrics( - GetClusterMetricsRequest request) throws YarnRemoteException, + GetClusterMetricsRequest request) throws YarnException, IOException { GetClusterMetricsRequestProto requestProto = ((GetClusterMetricsRequestPBImpl) request).getProto(); @@ -157,7 +157,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public GetNewApplicationResponse getNewApplication( - GetNewApplicationRequest request) throws YarnRemoteException, + GetNewApplicationRequest request) throws YarnException, IOException { GetNewApplicationRequestProto requestProto = ((GetNewApplicationRequestPBImpl) request).getProto(); @@ -172,7 +172,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public SubmitApplicationResponse submitApplication( - SubmitApplicationRequest request) throws YarnRemoteException, + SubmitApplicationRequest request) throws YarnException, IOException { SubmitApplicationRequestProto requestProto = ((SubmitApplicationRequestPBImpl) request).getProto(); @@ -187,7 +187,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public GetAllApplicationsResponse getAllApplications( - GetAllApplicationsRequest request) throws YarnRemoteException, + GetAllApplicationsRequest 
request) throws YarnException, IOException { GetAllApplicationsRequestProto requestProto = ((GetAllApplicationsRequestPBImpl) request).getProto(); @@ -203,7 +203,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetClusterNodesRequestProto requestProto = ((GetClusterNodesRequestPBImpl) request).getProto(); try { @@ -217,7 +217,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetQueueInfoRequestProto requestProto = ((GetQueueInfoRequestPBImpl) request).getProto(); try { @@ -231,7 +231,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public GetQueueUserAclsInfoResponse getQueueUserAcls( - GetQueueUserAclsInfoRequest request) throws YarnRemoteException, + GetQueueUserAclsInfoRequest request) throws YarnException, IOException { GetQueueUserAclsInfoRequestProto requestProto = ((GetQueueUserAclsInfoRequestPBImpl) request).getProto(); @@ -246,7 +246,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public GetDelegationTokenResponse getDelegationToken( - GetDelegationTokenRequest request) throws YarnRemoteException, + GetDelegationTokenRequest request) throws YarnException, IOException { GetDelegationTokenRequestProto requestProto = ((GetDelegationTokenRequestPBImpl) request).getProto(); @@ -261,7 +261,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public RenewDelegationTokenResponse renewDelegationToken( - RenewDelegationTokenRequest request) throws YarnRemoteException, + RenewDelegationTokenRequest request) throws YarnException, IOException { RenewDelegationTokenRequestProto requestProto = ((RenewDelegationTokenRequestPBImpl) request).getProto(); @@ -276,7 +276,7 @@ public class ClientRMProtocolPBClientImpl implements ClientRMProtocol, @Override public CancelDelegationTokenResponse cancelDelegationToken( - CancelDelegationTokenRequest request) throws YarnRemoteException, + CancelDelegationTokenRequest request) throws YarnException, IOException { CancelDelegationTokenRequestProto requestProto = ((CancelDelegationTokenRequestPBImpl) request).getProto(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagerPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagerPBClientImpl.java index 2f16479a88c..89213755e02 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagerPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagerPBClientImpl.java @@ -42,7 +42,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainerResponse import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainerRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainerResponsePBImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; 
 import org.apache.hadoop.yarn.ipc.RPCUtil;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto;
@@ -86,7 +86,7 @@ public class ContainerManagerPBClientImpl implements ContainerManager,
 
   @Override
   public GetContainerStatusResponse getContainerStatus(
-      GetContainerStatusRequest request) throws YarnRemoteException,
+      GetContainerStatusRequest request) throws YarnException,
       IOException {
     GetContainerStatusRequestProto requestProto =
         ((GetContainerStatusRequestPBImpl) request).getProto();
@@ -101,7 +101,7 @@ public class ContainerManagerPBClientImpl implements ContainerManager,
 
   @Override
   public StartContainerResponse startContainer(StartContainerRequest request)
-      throws YarnRemoteException, IOException {
+      throws YarnException, IOException {
     StartContainerRequestProto requestProto =
         ((StartContainerRequestPBImpl) request).getProto();
     try {
@@ -115,7 +115,7 @@ public class ContainerManagerPBClientImpl implements ContainerManager,
 
   @Override
   public StopContainerResponse stopContainer(StopContainerRequest request)
-      throws YarnRemoteException, IOException {
+      throws YarnException, IOException {
     StopContainerRequestProto requestProto =
         ((StopContainerRequestPBImpl) request).getProto();
     try {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/RMAdminProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/RMAdminProtocolPBClientImpl.java
index 3d4f7fd1d2f..29b51a6e12b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/RMAdminProtocolPBClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/RMAdminProtocolPBClientImpl.java
@@ -52,7 +52,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshSuperUserGroups
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationResponsePBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsRequestPBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsResponsePBImpl;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto;
@@ -87,7 +87,7 @@ public class RMAdminProtocolPBClientImpl implements RMAdminProtocol, Closeable {
 
   @Override
   public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request)
-      throws YarnRemoteException, IOException {
+      throws YarnException, IOException {
     RefreshQueuesRequestProto requestProto =
         ((RefreshQueuesRequestPBImpl)request).getProto();
     try {
@@ -101,7 +101,7 @@ public class RMAdminProtocolPBClientImpl implements RMAdminProtocol, Closeable {
 
   @Override
   public RefreshNodesResponse refreshNodes(RefreshNodesRequest request)
-      throws YarnRemoteException, IOException {
+      throws YarnException, IOException {
     RefreshNodesRequestProto requestProto =
         ((RefreshNodesRequestPBImpl)request).getProto();
     try {
@@ -116,7 +116,7 @@ public class RMAdminProtocolPBClientImpl implements RMAdminProtocol, Closeable {
 
   @Override
public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfiguration( RefreshSuperUserGroupsConfigurationRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { RefreshSuperUserGroupsConfigurationRequestProto requestProto = ((RefreshSuperUserGroupsConfigurationRequestPBImpl)request).getProto(); try { @@ -130,7 +130,7 @@ public class RMAdminProtocolPBClientImpl implements RMAdminProtocol, Closeable { @Override public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings( - RefreshUserToGroupsMappingsRequest request) throws YarnRemoteException, + RefreshUserToGroupsMappingsRequest request) throws YarnException, IOException { RefreshUserToGroupsMappingsRequestProto requestProto = ((RefreshUserToGroupsMappingsRequestPBImpl)request).getProto(); @@ -145,7 +145,7 @@ public class RMAdminProtocolPBClientImpl implements RMAdminProtocol, Closeable { @Override public RefreshAdminAclsResponse refreshAdminAcls( - RefreshAdminAclsRequest request) throws YarnRemoteException, IOException { + RefreshAdminAclsRequest request) throws YarnException, IOException { RefreshAdminAclsRequestProto requestProto = ((RefreshAdminAclsRequestPBImpl)request).getProto(); try { @@ -159,7 +159,7 @@ public class RMAdminProtocolPBClientImpl implements RMAdminProtocol, Closeable { @Override public RefreshServiceAclsResponse refreshServiceAcls( - RefreshServiceAclsRequest request) throws YarnRemoteException, + RefreshServiceAclsRequest request) throws YarnException, IOException { RefreshServiceAclsRequestProto requestProto = ((RefreshServiceAclsRequestPBImpl)request).getProto(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/AMRMProtocolPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/AMRMProtocolPBServiceImpl.java index f98c031a366..dae61336a80 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/AMRMProtocolPBServiceImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/AMRMProtocolPBServiceImpl.java @@ -31,7 +31,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMaste import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMasterResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto; @@ -57,7 +57,7 @@ public class AMRMProtocolPBServiceImpl implements AMRMProtocolPB { try { AllocateResponse response = real.allocate(request); return ((AllocateResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -72,7 +72,7 @@ public class AMRMProtocolPBServiceImpl implements AMRMProtocolPB { try { FinishApplicationMasterResponse response = real.finishApplicationMaster(request); return 
((FinishApplicationMasterResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -87,7 +87,7 @@ public class AMRMProtocolPBServiceImpl implements AMRMProtocolPB { try { RegisterApplicationMasterResponse response = real.registerApplicationMaster(request); return ((RegisterApplicationMasterResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java index 8fb1b71335a..caad876e803 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java @@ -64,7 +64,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenRe import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsResponseProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto; @@ -102,7 +102,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { KillApplicationResponse response = real.forceKillApplication(request); return ((KillApplicationResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -117,7 +117,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { GetApplicationReportResponse response = real.getApplicationReport(request); return ((GetApplicationReportResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -131,7 +131,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { GetClusterMetricsResponse response = real.getClusterMetrics(request); return ((GetClusterMetricsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -146,7 +146,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { GetNewApplicationResponse response = real.getNewApplication(request); return ((GetNewApplicationResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new 
ServiceException(e); @@ -160,7 +160,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { SubmitApplicationResponse response = real.submitApplication(request); return ((SubmitApplicationResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -176,7 +176,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { GetAllApplicationsResponse response = real.getAllApplications(request); return ((GetAllApplicationsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -191,7 +191,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { GetClusterNodesResponse response = real.getClusterNodes(request); return ((GetClusterNodesResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -206,7 +206,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { GetQueueInfoResponse response = real.getQueueInfo(request); return ((GetQueueInfoResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -222,7 +222,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { GetQueueUserAclsInfoResponse response = real.getQueueUserAcls(request); return ((GetQueueUserAclsInfoResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -238,7 +238,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { GetDelegationTokenResponse response = real.getDelegationToken(request); return ((GetDelegationTokenResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -254,7 +254,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { RenewDelegationTokenResponse response = real.renewDelegationToken(request); return ((RenewDelegationTokenResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -270,7 +270,7 @@ public class ClientRMProtocolPBServiceImpl implements ClientRMProtocolPB { try { CancelDelegationTokenResponse response = real.cancelDelegationToken(request); return ((CancelDelegationTokenResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ContainerManagerPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ContainerManagerPBServiceImpl.java index 19eefff1a99..398fce6ead2 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ContainerManagerPBServiceImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ContainerManagerPBServiceImpl.java @@ -31,7 +31,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainerRequestP import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainerResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainerRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainerResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusResponseProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto; @@ -57,7 +57,7 @@ public class ContainerManagerPBServiceImpl implements ContainerManagerPB { try { GetContainerStatusResponse response = real.getContainerStatus(request); return ((GetContainerStatusResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -71,7 +71,7 @@ public class ContainerManagerPBServiceImpl implements ContainerManagerPB { try { StartContainerResponse response = real.startContainer(request); return ((StartContainerResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -85,7 +85,7 @@ public class ContainerManagerPBServiceImpl implements ContainerManagerPB { try { StopContainerResponse response = real.stopContainer(request); return ((StopContainerResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/RMAdminProtocolPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/RMAdminProtocolPBServiceImpl.java index 385d90986e9..5450e713096 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/RMAdminProtocolPBServiceImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/RMAdminProtocolPBServiceImpl.java @@ -40,7 +40,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshSuperUserGroups import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.*; import com.google.protobuf.RpcController; @@ -61,7 +61,7 @@ public class RMAdminProtocolPBServiceImpl implements RMAdminProtocolPB { try { 
RefreshQueuesResponse response = real.refreshQueues(request); return ((RefreshQueuesResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -77,7 +77,7 @@ public class RMAdminProtocolPBServiceImpl implements RMAdminProtocolPB { try { RefreshAdminAclsResponse response = real.refreshAdminAcls(request); return ((RefreshAdminAclsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -91,7 +91,7 @@ public class RMAdminProtocolPBServiceImpl implements RMAdminProtocolPB { try { RefreshNodesResponse response = real.refreshNodes(request); return ((RefreshNodesResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -110,7 +110,7 @@ public class RMAdminProtocolPBServiceImpl implements RMAdminProtocolPB { RefreshSuperUserGroupsConfigurationResponse response = real.refreshSuperUserGroupsConfiguration(request); return ((RefreshSuperUserGroupsConfigurationResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -127,7 +127,7 @@ public class RMAdminProtocolPBServiceImpl implements RMAdminProtocolPB { RefreshUserToGroupsMappingsResponse response = real.refreshUserToGroupsMappings(request); return ((RefreshUserToGroupsMappingsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -144,7 +144,7 @@ public class RMAdminProtocolPBServiceImpl implements RMAdminProtocolPB { RefreshServiceAclsResponse response = real.refreshServiceAcls(request); return ((RefreshServiceAclsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java index 6335a8be591..a8a9be4524e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java @@ -29,7 +29,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.ShutdownHookManager; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.service.AbstractService; /** @@ -190,7 +190,7 @@ public class AsyncDispatcher extends AbstractService implements Dispatcher { if (!stopped) { LOG.warn("AsyncDispatcher thread interrupted", e); } - throw new YarnException(e); + throw new YarnRuntimeException(e); } }; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java index ce8d05c117e..4eadaa4f70c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java @@ -24,7 +24,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; public class RecordFactoryPBImpl implements RecordFactory { @@ -53,7 +53,7 @@ public class RecordFactoryPBImpl implements RecordFactory { try { pbClazz = localConf.getClassByName(getPBImplClassName(clazz)); } catch (ClassNotFoundException e) { - throw new YarnException("Failed to load class: [" + throw new YarnRuntimeException("Failed to load class: [" + getPBImplClassName(clazz) + "]", e); } try { @@ -61,18 +61,18 @@ public class RecordFactoryPBImpl implements RecordFactory { constructor.setAccessible(true); cache.putIfAbsent(clazz, constructor); } catch (NoSuchMethodException e) { - throw new YarnException("Could not find 0 argument constructor", e); + throw new YarnRuntimeException("Could not find 0 argument constructor", e); } } try { Object retObject = constructor.newInstance(); return (T)retObject; } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InstantiationException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java index 9fc81d2c083..41acf7defc9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java @@ -28,7 +28,7 @@ import java.util.concurrent.ConcurrentMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RpcClientFactory; public class RpcClientFactoryPBImpl implements RpcClientFactory { @@ -59,7 +59,7 @@ public class RpcClientFactoryPBImpl implements RpcClientFactory { try { pbClazz = localConf.getClassByName(getPBImplClassName(protocol)); } catch (ClassNotFoundException e) { - throw new YarnException("Failed to load class: [" + throw new YarnRuntimeException("Failed to load class: [" + getPBImplClassName(protocol) + "]", e); } try { @@ -67,18 +67,18 @@ public class RpcClientFactoryPBImpl implements RpcClientFactory { constructor.setAccessible(true); cache.putIfAbsent(protocol, constructor); } catch (NoSuchMethodException e) { - throw new YarnException("Could not find constructor with params: " + Long.TYPE + ", " + 
InetSocketAddress.class + ", " + Configuration.class, e); + throw new YarnRuntimeException("Could not find constructor with params: " + Long.TYPE + ", " + InetSocketAddress.class + ", " + Configuration.class, e); } } try { Object retObject = constructor.newInstance(clientVersion, addr, conf); return retObject; } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InstantiationException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -88,11 +88,11 @@ public class RpcClientFactoryPBImpl implements RpcClientFactory { Method closeMethod = proxy.getClass().getMethod("close"); closeMethod.invoke(proxy); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (Exception e) { LOG.error("Cannot call close method due to Exception. " + "Ignoring.", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java index 49c7bc7d003..54eb1dfc08b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java @@ -34,7 +34,7 @@ import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RpcServerFactory; import com.google.protobuf.BlockingService; @@ -81,7 +81,7 @@ public class RpcServerFactoryPBImpl implements RpcServerFactory { pbServiceImplClazz = localConf .getClassByName(getPbServiceImplClassName(protocol)); } catch (ClassNotFoundException e) { - throw new YarnException("Failed to load class: [" + throw new YarnRuntimeException("Failed to load class: [" + getPbServiceImplClassName(protocol) + "]", e); } try { @@ -89,7 +89,7 @@ public class RpcServerFactoryPBImpl implements RpcServerFactory { constructor.setAccessible(true); serviceCache.putIfAbsent(protocol, constructor); } catch (NoSuchMethodException e) { - throw new YarnException("Could not find constructor with params: " + throw new YarnRuntimeException("Could not find constructor with params: " + Long.TYPE + ", " + InetSocketAddress.class + ", " + Configuration.class, e); } @@ -99,11 +99,11 @@ public class RpcServerFactoryPBImpl implements RpcServerFactory { try { service = constructor.newInstance(instance); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InstantiationException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } Class pbProtocol = service.getClass().getInterfaces()[0]; @@ -113,7 +113,7 @@ public class RpcServerFactoryPBImpl implements RpcServerFactory { try { protoClazz = 
localConf.getClassByName(getProtoClassName(protocol)); } catch (ClassNotFoundException e) { - throw new YarnException("Failed to load class: [" + throw new YarnRuntimeException("Failed to load class: [" + getProtoClassName(protocol) + "]", e); } try { @@ -122,7 +122,7 @@ public class RpcServerFactoryPBImpl implements RpcServerFactory { method.setAccessible(true); protoCache.putIfAbsent(protocol, method); } catch (NoSuchMethodException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -130,11 +130,11 @@ public class RpcServerFactoryPBImpl implements RpcServerFactory { return createServer(pbProtocol, addr, conf, secretManager, numHandlers, (BlockingService)method.invoke(null, service), portRangeConfig); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java index 38deca667d8..09b3231d885 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java @@ -22,7 +22,7 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factories.RpcClientFactory; import org.apache.hadoop.yarn.factories.RpcServerFactory; @@ -61,13 +61,13 @@ public class RpcFactoryProvider { method.setAccessible(true); return method.invoke(null, null); } catch (ClassNotFoundException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (NoSuchMethodException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/RPCUtil.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/RPCUtil.java index 4e93d03abf6..35d2acbc516 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/RPCUtil.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/RPCUtil.java @@ -23,24 +23,24 @@ import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import org.apache.hadoop.ipc.RemoteException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import com.google.protobuf.ServiceException; public class RPCUtil { /** - * Returns an instance of YarnRemoteException + * Returns an instance of {@link YarnException} */ - public static 
YarnRemoteException getRemoteException(Throwable t) {
-    return new YarnRemoteException(t);
+  public static YarnException getRemoteException(Throwable t) {
+    return new YarnException(t);
   }
 
   /**
-   * Returns an instance of YarnRemoteException
+   * Returns an instance of {@link YarnException}
    */
-  public static YarnRemoteException getRemoteException(String message) {
-    return new YarnRemoteException(message);
+  public static YarnException getRemoteException(String message) {
+    return new YarnException(message);
   }
 
   private static T instantiateException(
@@ -74,10 +74,10 @@ public class RPCUtil {
    * @param se
    *          ServiceException
    * @return An instance of the actual exception, which will be a subclass of
-   *         {@link YarnRemoteException} or {@link IOException}
+   *         {@link YarnException} or {@link IOException}
    */
   public static Void unwrapAndThrowException(ServiceException se)
-      throws IOException, YarnRemoteException {
+      throws IOException, YarnException {
     Throwable cause = se.getCause();
     if (cause == null) {
       // SE generated by the RPC layer itself.
@@ -92,12 +92,12 @@ public class RPCUtil {
       // Assume this to be a new exception type added to YARN. This isn't
       // absolutely correct since the RPC layer could add an exception as
       // well.
-      throw instantiateException(YarnRemoteException.class, re);
+      throw instantiateException(YarnException.class, re);
     }
 
-    if (YarnRemoteException.class.isAssignableFrom(realClass)) {
+    if (YarnException.class.isAssignableFrom(realClass)) {
       throw instantiateException(
-          realClass.asSubclass(YarnRemoteException.class), re);
+          realClass.asSubclass(YarnException.class), re);
     } else if (IOException.class.isAssignableFrom(realClass)) {
       throw instantiateException(realClass.asSubclass(IOException.class),
           re);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java
index 512b8d49ce5..eb37062b826 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
 /**
@@ -63,7 +63,7 @@ public abstract class YarnRPC {
     try {
       return (YarnRPC) Class.forName(clazzName).newInstance();
     } catch (Exception e) {
-      throw new YarnException(e);
+      throw new YarnRuntimeException(e);
     }
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
index 185020dc4a3..51691685d9f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
@@ -54,7 +54,7 @@ import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.file.tfile.TFile;
 import 
org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -365,7 +365,7 @@ public class AggregatedLogFormat { try { aclString = valueStream.readUTF(); } catch (EOFException e) { - throw new YarnException("Error reading ACLs", e); + throw new YarnRuntimeException("Error reading ACLs", e); } acls.put(ApplicationAccessType.valueOf(appAccessOp), aclString); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java index 3a29450832e..0d1ea1229d5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java @@ -27,7 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; public class AdminACLsManager { @@ -69,7 +69,7 @@ public class AdminACLsManager { adminAcl.addUser(owner.getShortUserName()); } catch (IOException e){ LOG.warn("Could not add current user to admin:" + e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } aclsEnabled = conf.getBoolean(YarnConfiguration.YARN_ACL_ENABLE, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier.java index 981ca5e0a9a..7ec3b9dc2a6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier.java @@ -37,7 +37,7 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecret import org.apache.hadoop.yarn.api.ClientRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.util.Records; @@ -104,7 +104,7 @@ public class RMDelegationTokenIdentifier extends AbstractDelegationTokenIdentifi Records.newRecord(RenewDelegationTokenRequest.class); request.setDelegationToken(convertToProtoToken(token)); return rmClient.renewDelegationToken(request).getNextExpirationTime(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } finally { RPC.stopProxy(rmClient); @@ -126,7 +126,7 @@ public class RMDelegationTokenIdentifier extends AbstractDelegationTokenIdentifi Records.newRecord(CancelDelegationTokenRequest.class); 
request.setDelegationToken(convertToProtoToken(token)); rmClient.cancelDelegationToken(request); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } finally { RPC.stopProxy(rmClient); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/service/CompositeService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/service/CompositeService.java index cd4e52349e4..26a091d5869 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/service/CompositeService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/service/CompositeService.java @@ -26,7 +26,7 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; /** * Composition of services. @@ -75,7 +75,7 @@ public class CompositeService extends AbstractService { // call stop() on all services including failed service to make sure cleanup // happens. stop(i); - throw new YarnException("Failed to Start " + getName(), e); + throw new YarnRuntimeException("Failed to Start " + getName(), e); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/InvalidStateTransitonException.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/InvalidStateTransitonException.java index aeef3a213bf..8708fa47742 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/InvalidStateTransitonException.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/InvalidStateTransitonException.java @@ -18,9 +18,9 @@ package org.apache.hadoop.yarn.state; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; -public class InvalidStateTransitonException extends YarnException { +public class InvalidStateTransitonException extends YarnRuntimeException { private Enum currentState; private Enum event; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java index 01fc38cbb43..962c2b94c73 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java @@ -22,7 +22,7 @@ import java.util.Iterator; import java.util.Map; import org.apache.hadoop.util.StringInterner; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import static org.apache.hadoop.yarn.util.StringHelper.*; @@ -56,7 +56,7 @@ public class Apps { } public static void throwParseException(String name, String s) { - throw new YarnException(join("Error parsing ", name, ": ", s)); + throw new YarnRuntimeException(join("Error parsing ", name, ": ", s)); } public static void setEnvFromInputString(Map env, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebAppException.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebAppException.java index 09b8bdd989d..a1f0768e012 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebAppException.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebAppException.java @@ -18,9 +18,9 @@ package org.apache.hadoop.yarn.webapp; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; -public class WebAppException extends YarnException { +public class WebAppException extends YarnRuntimeException { private static final long serialVersionUID = 1L; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java index 671f03c0ccd..5d0964bef67 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java @@ -46,7 +46,7 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC; @@ -130,7 +130,7 @@ public class TestContainerLaunchRPC { @Override public GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException { + GetContainerStatusRequest request) throws YarnException { GetContainerStatusResponse response = recordFactory .newRecordInstance(GetContainerStatusResponse.class); response.setStatus(status); @@ -139,23 +139,23 @@ public class TestContainerLaunchRPC { @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { try { // make the thread sleep to look like its not going to respond Thread.sleep(10000); } catch (Exception e) { LOG.error(e); - throw new YarnRemoteException(e); + throw new YarnException(e); } - throw new YarnRemoteException("Shouldn't happen!!"); + throw new YarnException("Shouldn't happen!!"); } @Override public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException { + throws YarnException { Exception e = new Exception("Dummy function", new Exception( "Dummy function cause")); - throw new YarnRemoteException(e); + throw new YarnException(e); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java index 502ebc50b21..c34e64773dc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java @@ -50,7 +50,7 @@ import org.apache.hadoop.yarn.api.records.NodeId; import 
org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC; @@ -86,7 +86,7 @@ public class TestRPC { proxy.getNewApplication(Records .newRecord(GetNewApplicationRequest.class)); Assert.fail("Excepted RPC call to fail with unknown method."); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.assertTrue(e.getMessage().matches( "Unknown method getNewApplication called on.*" + "org.apache.hadoop.yarn.proto.ClientRMProtocol" @@ -147,7 +147,7 @@ public class TestRPC { StopContainerRequest stopRequest = recordFactory.newRecordInstance(StopContainerRequest.class); stopRequest.setContainerId(containerId); proxy.stopContainer(stopRequest); - } catch (YarnRemoteException e) { + } catch (YarnException e) { exception = true; Assert.assertTrue(e.getMessage().contains(EXCEPTION_MSG)); Assert.assertTrue(e.getMessage().contains(EXCEPTION_CAUSE)); @@ -169,7 +169,7 @@ public class TestRPC { @Override public GetContainerStatusResponse getContainerStatus( GetContainerStatusRequest request) - throws YarnRemoteException { + throws YarnException { GetContainerStatusResponse response = recordFactory.newRecordInstance(GetContainerStatusResponse.class); response.setStatus(status); @@ -178,7 +178,7 @@ public class TestRPC { @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException { + throws YarnException { Token containerToken = request.getContainerToken(); ContainerTokenIdentifier tokenId = null; @@ -199,10 +199,10 @@ public class TestRPC { @Override public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException { + throws YarnException { Exception e = new Exception(EXCEPTION_MSG, new Exception(EXCEPTION_CAUSE)); - throw new YarnRemoteException(e); + throw new YarnException(e); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java index e2c0e6f583f..672416cbfda 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java @@ -33,7 +33,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl; import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl; import org.junit.Test; @@ -61,7 +61,7 @@ public class TestRPCFactories { RpcServerFactoryPBImpl.get().getServer( AMRMProtocol.class, instance, addr, conf, null, 1); server.start(); - } catch (YarnException e) { + } catch (YarnRuntimeException 
e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -89,12 +89,12 @@ public class TestRPCFactories { AMRMProtocol amrmClient = null; try { amrmClient = (AMRMProtocol) RpcClientFactoryPBImpl.get().getClient(AMRMProtocol.class, 1, NetUtils.getConnectAddress(server), conf); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create client"); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -108,7 +108,7 @@ public class TestRPCFactories { @Override public RegisterApplicationMasterResponse registerApplicationMaster( - RegisterApplicationMasterRequest request) throws YarnRemoteException, + RegisterApplicationMasterRequest request) throws YarnException, IOException { // TODO Auto-generated method stub return null; @@ -116,7 +116,7 @@ public class TestRPCFactories { @Override public FinishApplicationMasterResponse finishApplicationMaster( - FinishApplicationMasterRequest request) throws YarnRemoteException, + FinishApplicationMasterRequest request) throws YarnException, IOException { // TODO Auto-generated method stub return null; @@ -124,7 +124,7 @@ public class TestRPCFactories { @Override public AllocateResponse allocate(AllocateRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // TODO Auto-generated method stub return null; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java index 19c50ce1cf5..cb498940bff 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java @@ -38,7 +38,7 @@ public class TestRecordFactory { AllocateResponse response = pbRecordFactory.newRecordInstance(AllocateResponse.class); Assert.assertEquals(AllocateResponsePBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } @@ -47,7 +47,7 @@ public class TestRecordFactory { AllocateRequest response = pbRecordFactory.newRecordInstance(AllocateRequest.class); Assert.assertEquals(AllocateRequestPBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java index db5caa93194..ad5afe4e73f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java @@ -50,12 +50,12 @@ public class TestRpcFactoryProvider { try { clientFactory = RpcFactoryProvider.getClientFactory(conf); Assert.fail("Expected an exception - unknown serializer"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { } try { serverFactory = RpcFactoryProvider.getServerFactory(conf); Assert.fail("Expected an exception - unknown 
serializer"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { } conf = new Configuration(); @@ -65,11 +65,11 @@ public class TestRpcFactoryProvider { try { clientFactory = RpcFactoryProvider.getClientFactory(conf); Assert.fail("Expected an exception - unknown class"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { } try { serverFactory = RpcFactoryProvider.getServerFactory(conf); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { Assert.fail("Error while loading factory using reflection: [" + RpcServerFactoryPBImpl.class.getName() + "]"); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestYarnUncaughtExceptionHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestYarnUncaughtExceptionHandler.java index 809ef7caa7c..68c43644fb0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestYarnUncaughtExceptionHandler.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestYarnUncaughtExceptionHandler.java @@ -30,7 +30,7 @@ public class TestYarnUncaughtExceptionHandler { private static final YarnUncaughtExceptionHandler exHandler = new YarnUncaughtExceptionHandler(); /** - * Throw {@code YarnException} inside thread and + * Throw {@code YarnRuntimeException} inside thread and * check {@code YarnUncaughtExceptionHandler} instance * * @throws InterruptedException @@ -39,7 +39,7 @@ public class TestYarnUncaughtExceptionHandler { public void testUncaughtExceptionHandlerWithRuntimeException() throws InterruptedException { final YarnUncaughtExceptionHandler spyYarnHandler = spy(exHandler); - final YarnException yarnException = new YarnException( + final YarnRuntimeException yarnException = new YarnRuntimeException( "test-yarn-runtime-exception"); final Thread yarnThread = new Thread(new Runnable() { @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java index 82e20cd00c0..3131c7b5cdf 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java @@ -24,7 +24,7 @@ import java.io.IOException; import junit.framework.Assert; import org.apache.hadoop.ipc.RemoteException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.junit.Test; import com.google.protobuf.ServiceException; @@ -33,7 +33,7 @@ public class TestRPCUtil { @Test public void testUnknownExceptionUnwrapping() { - Class exception = YarnRemoteException.class; + Class exception = YarnException.class; String className = "UnknownException.class"; verifyRemoteExceptionUnwrapping(exception, className); } @@ -53,7 +53,7 @@ public class TestRPCUtil { @Test public void testRemoteYarnExceptionUnwrapping() { - Class exception = YarnRemoteException.class; + Class exception = YarnException.class; verifyRemoteExceptionUnwrapping(exception, exception.getName()); } @@ -73,7 +73,7 @@ public class TestRPCUtil { @Test public void testRemoteYarnExceptionWithoutStringConstructor() { - // Derivatives of YarnException should always defined a string constructor. 
+ // Derivatives of YarnException should always define a string constructor. Class exception = YarnTestExceptionNoConstructor.class; verifyRemoteExceptionUnwrapping(RemoteException.class, exception.getName()); } @@ -131,7 +131,7 @@ public class TestRPCUtil { .getMessage().contains(message)); } - private static class YarnTestException extends YarnRemoteException { + private static class YarnTestException extends YarnException { private static final long serialVersionUID = 1L; @SuppressWarnings("unused") @@ -141,7 +141,7 @@ public class TestRPCUtil { } private static class YarnTestExceptionNoConstructor extends - YarnRemoteException { + YarnException { private static final long serialVersionUID = 1L; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestCompositeService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestCompositeService.java index 67c2de16419..0fc598ad7f7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestCompositeService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestCompositeService.java @@ -22,7 +22,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.service.CompositeService; import org.apache.hadoop.yarn.service.Service.STATE; import org.junit.Before; @@ -129,7 +129,7 @@ public class TestCompositeService { try { serviceManager.start(); fail("Exception should have been thrown due to startup failure of last service"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { for (int i = 0; i < NUM_OF_SERVICES - 1; i++) { if (i >= FAILED_SERVICE_SEQ_NUMBER) { // Failed service state should be INITED @@ -170,7 +170,7 @@ public class TestCompositeService { // Stop the composite service try { serviceManager.stop(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { for (int i = 0; i < NUM_OF_SERVICES - 1; i++) { assertEquals("Service state should have been ", STATE.STOPPED, services[NUM_OF_SERVICES].getServiceState()); @@ -202,7 +202,7 @@ public class TestCompositeService { @Override public synchronized void start() { if (throwExceptionOnStart) { - throw new YarnException("Fake service start exception"); + throw new YarnRuntimeException("Fake service start exception"); } counter++; callSequenceNumber = counter; @@ -214,7 +214,7 @@ public class TestCompositeService { counter++; callSequenceNumber = counter; if (throwExceptionOnStop) { - throw new YarnException("Fake service stop exception"); + throw new YarnRuntimeException("Fake service stop exception"); } super.stop(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ResourceTracker.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ResourceTracker.java index 51f8198f9df..56cc3179e4b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ResourceTracker.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ResourceTracker.java @@ -19,7 
+19,7 @@ package org.apache.hadoop.yarn.server.api; import java.io.IOException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest; @@ -28,10 +28,10 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResp public interface ResourceTracker { public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException; public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java index b6382840bd9..396204cf2db 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java @@ -24,7 +24,7 @@ import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto; @@ -53,7 +53,7 @@ private ResourceTrackerPB proxy; @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerRequestProto requestProto = ((RegisterNodeManagerRequestPBImpl)request).getProto(); try { @@ -66,7 +66,7 @@ private ResourceTrackerPB proxy; @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeHeartbeatRequestProto requestProto = ((NodeHeartbeatRequestPBImpl)request).getProto(); try { return new NodeHeartbeatResponsePBImpl(proxy.nodeHeartbeat(null, requestProto)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceTrackerPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceTrackerPBServiceImpl.java index 4d0d3e782cb..442e3c80591 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceTrackerPBServiceImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceTrackerPBServiceImpl.java @@ -20,7 +20,7 @@ package org.apache.hadoop.yarn.server.api.impl.pb.service; import java.io.IOException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto; @@ -53,7 +53,7 @@ public class ResourceTrackerPBServiceImpl implements ResourceTrackerPB { try { RegisterNodeManagerResponse response = real.registerNodeManager(request); return ((RegisterNodeManagerResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -67,7 +67,7 @@ public class ResourceTrackerPBServiceImpl implements ResourceTrackerPB { try { NodeHeartbeatResponse response = real.nodeHeartbeat(request); return ((NodeHeartbeatResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java index 63d74f30620..33ac78323c1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java @@ -26,8 +26,8 @@ import junit.framework.Assert; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.YarnRuntimeException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl; import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl; import org.apache.hadoop.yarn.server.api.ResourceTracker; @@ -60,7 +60,7 @@ public class TestRPCFactories { RpcServerFactoryPBImpl.get().getServer( ResourceTracker.class, instance, addr, conf, null, 1); server.start(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -86,12 +86,12 @@ public class TestRPCFactories { ResourceTracker client = null; try { client = (ResourceTracker) RpcClientFactoryPBImpl.get().getClient(ResourceTracker.class, 1, NetUtils.getConnectAddress(server), conf); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create client"); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { 
e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -103,7 +103,7 @@ public class TestRPCFactories { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { // TODO Auto-generated method stub return null; @@ -111,7 +111,7 @@ public class TestRPCFactories { @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // TODO Auto-generated method stub return null; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java index b833e6104bd..6ce11c88d5a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java @@ -20,7 +20,7 @@ package org.apache.hadoop.yarn; import junit.framework.Assert; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; @@ -35,7 +35,7 @@ public class TestRecordFactory { try { NodeHeartbeatRequest request = pbRecordFactory.newRecordInstance(NodeHeartbeatRequest.class); Assert.assertEquals(NodeHeartbeatRequestPBImpl.class, request.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create record"); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java index 582db06f5f2..9b66fa44c94 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java @@ -33,7 +33,7 @@ import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.service.AbstractService; @@ -88,7 +88,7 @@ public class LocalDirsHandlerService extends AbstractService { */ private final class MonitoringTimerTask extends TimerTask { - public MonitoringTimerTask(Configuration conf) throws YarnException { + public MonitoringTimerTask(Configuration conf) throws YarnRuntimeException { localDirs = new DirectoryCollection( validatePaths(conf.getTrimmedStrings(YarnConfiguration.NM_LOCAL_DIRS))); logDirs = new
DirectoryCollection( @@ -132,7 +132,7 @@ public class LocalDirsHandlerService extends AbstractService { try { localFs = FileContext.getLocalFSFileContext(config); } catch (IOException e) { - throw new YarnException("Unable to get the local filesystem", e); + throw new YarnRuntimeException("Unable to get the local filesystem", e); } FsPermission perm = new FsPermission((short)0755); boolean createSucceeded = localDirs.createNonExistentDirs(localFs, perm); @@ -311,13 +311,13 @@ public class LocalDirsHandlerService extends AbstractService { } else { LOG.warn(paths[i] + " is not a valid path. Path should be with " + FILE_SCHEME + " scheme or without scheme"); - throw new YarnException(paths[i] + throw new YarnRuntimeException(paths[i] + " is not a valid path. Path should be with " + FILE_SCHEME + " scheme or without scheme"); } } catch (IllegalArgumentException e) { LOG.warn(e.getMessage()); - throw new YarnException(paths[i] + throw new YarnRuntimeException(paths[i] + " is not a valid path. Path should be with " + FILE_SCHEME + " scheme or without scheme"); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java index e310b9450c3..04bf9ee1189 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java @@ -35,7 +35,7 @@ import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ShutdownHookManager; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.ContainerManager; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -145,7 +145,7 @@ public class NodeManager extends CompositeService try { exec.init(); } catch (IOException e) { - throw new YarnException("Failed to initialize container executor", e); + throw new YarnRuntimeException("Failed to initialize container executor", e); } DeletionService del = createDeletionService(exec); addService(del); @@ -201,7 +201,7 @@ public class NodeManager extends CompositeService try { doSecureLogin(); } catch (IOException e) { - throw new YarnException("Failed NodeManager login", e); + throw new YarnRuntimeException("Failed NodeManager login", e); } super.start(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java index 53c01d9f25e..b671a954be4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java @@ -35,7 +35,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerState; @@ -45,7 +45,7 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -205,7 +205,7 @@ public class NodeStatusUpdaterImpl extends AbstractService implements } @VisibleForTesting - protected void registerWithRM() throws YarnRemoteException, IOException { + protected void registerWithRM() throws YarnException, IOException { Configuration conf = getConfig(); rmConnectWaitMS = conf.getInt( @@ -220,7 +220,7 @@ public class NodeStatusUpdaterImpl extends AbstractService implements * 1000; if(rmConnectionRetryIntervalMS < 0) { - throw new YarnException("Invalid Configuration. " + + throw new YarnRuntimeException("Invalid Configuration. " + YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS + " should not be negative."); } @@ -229,7 +229,7 @@ public class NodeStatusUpdaterImpl extends AbstractService implements if(! waitForEver) { if(rmConnectWaitMS < 0) { - throw new YarnException("Invalid Configuration. " + + throw new YarnRuntimeException("Invalid Configuration. " + YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS + " can be -1, but can not be other negative numbers"); } @@ -280,7 +280,7 @@ public class NodeStatusUpdaterImpl extends AbstractService implements String errorMessage = "Failed to Connect to RM, " + "no. of failed attempts is "+rmRetryCount; LOG.error(errorMessage,e); - throw new YarnException(errorMessage,e); + throw new YarnRuntimeException(errorMessage,e); } } } @@ -289,7 +289,7 @@ public class NodeStatusUpdaterImpl extends AbstractService implements String message = "Message from ResourceManager: " + regNMResponse.getDiagnosticsMessage(); - throw new YarnException( + throw new YarnRuntimeException( "Received SHUTDOWN signal from Resourcemanager, Registration of NodeManager failed, " + message); } @@ -454,7 +454,7 @@ public class NodeStatusUpdaterImpl extends AbstractService implements String errorMessage = "Failed to heartbeat to RM, " + "no.
of failed attempts is "+rmRetryCount; LOG.error(errorMessage,e); - throw new YarnException(errorMessage,e); + throw new YarnRuntimeException(errorMessage,e); } } } @@ -507,7 +507,7 @@ public class NodeStatusUpdaterImpl extends AbstractService implements dispatcher.getEventHandler().handle( new CMgrCompletedAppsEvent(appsToCleanup)); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { //catch and throw the exception if tried MAX wait time to connect RM dispatcher.getEventHandler().handle( new NodeManagerEvent(NodeManagerEventType.SHUTDOWN)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/LocalizationProtocol.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/LocalizationProtocol.java index 4e7a0728459..6b634838bbd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/LocalizationProtocol.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/LocalizationProtocol.java @@ -19,11 +19,11 @@ package org.apache.hadoop.yarn.server.nodemanager.api; import java.io.IOException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerStatus; public interface LocalizationProtocol { public LocalizerHeartbeatResponse heartbeat(LocalizerStatus status) - throws YarnRemoteException, IOException; + throws YarnException, IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java index 8ec1e81e125..b9cb8d96f09 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java @@ -24,7 +24,7 @@ import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerStatusProto; import org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocol; @@ -56,7 +56,7 @@ public class LocalizationProtocolPBClientImpl implements LocalizationProtocol, @Override public LocalizerHeartbeatResponse heartbeat(LocalizerStatus status) - throws YarnRemoteException, IOException { + throws YarnException, IOException { LocalizerStatusProto statusProto = 
((LocalizerStatusPBImpl)status).getProto(); try { return new LocalizerHeartbeatResponsePBImpl( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/service/LocalizationProtocolPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/service/LocalizationProtocolPBServiceImpl.java index d2b4b4e3a02..cb2a6f714e2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/service/LocalizationProtocolPBServiceImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/service/LocalizationProtocolPBServiceImpl.java @@ -25,7 +25,7 @@ import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.impl.pb.Loc import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerHeartbeatResponseProto; import org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerStatusProto; import org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocol; @@ -47,7 +47,7 @@ public class LocalizationProtocolPBServiceImpl implements LocalizationProtocolPB try { LocalizerHeartbeatResponse response = real.heartbeat(request); return ((LocalizerHeartbeatResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java index deb7e37c75f..7cdba1e532a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java @@ -56,7 +56,7 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -275,7 +275,7 @@ public class ContainerManagerImpl extends CompositeService implements // Get the remoteUGI corresponding to the api call. 
private UserGroupInformation getRemoteUgi() - throws YarnRemoteException { + throws YarnException { UserGroupInformation remoteUgi; try { remoteUgi = UserGroupInformation.getCurrentUser(); @@ -309,7 +309,7 @@ public class ContainerManagerImpl extends CompositeService implements protected ContainerTokenIdentifier getContainerTokenIdentifier( UserGroupInformation remoteUgi, ContainerTokenIdentifier containerTokenIdentifier) - throws YarnRemoteException { + throws YarnException { if (UserGroupInformation.isSecurityEnabled()) { if (LOG.isDebugEnabled()) { LOG.debug("Number of TokenIdentifiers in the UGI from RPC: " @@ -331,14 +331,14 @@ public class ContainerManagerImpl extends CompositeService implements * passed if verifying the startContainer, null otherwise. * @param remoteUgi * ugi corresponding to the remote end making the api-call - * @throws YarnRemoteException + * @throws YarnException */ @Private @VisibleForTesting protected void authorizeRequest(String containerIDStr, ContainerLaunchContext launchContext, UserGroupInformation remoteUgi, ContainerTokenIdentifier tokenId) - throws YarnRemoteException { + throws YarnException { boolean unauthorized = false; StringBuilder messageBuilder = @@ -391,7 +391,7 @@ public class ContainerManagerImpl extends CompositeService implements @SuppressWarnings("unchecked") @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { if (blockNewContainerRequests.get()) { throw RPCUtil.getRemoteException(new NMNotYetReadyException( @@ -507,7 +507,7 @@ public class ContainerManagerImpl extends CompositeService implements @Override @SuppressWarnings("unchecked") public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { ContainerId containerID = request.getContainerId(); String containerIDStr = containerID.toString(); @@ -549,7 +549,7 @@ public class ContainerManagerImpl extends CompositeService implements @Override public GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException, + GetContainerStatusRequest request) throws YarnException, IOException { ContainerId containerID = request.getContainerId(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/InvalidContainerException.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/InvalidContainerException.java index 87f1cae243a..d7613261620 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/InvalidContainerException.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/InvalidContainerException.java @@ -18,12 +18,12 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; /** * This Exception happens when NM is rejecting container requests from RM */ -public class InvalidContainerException extends YarnException { +public class InvalidContainerException extends YarnRuntimeException { private 
static final long serialVersionUID = 1L; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/NMNotYetReadyException.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/NMNotYetReadyException.java index a47f68120b9..d63bd2ef6d5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/NMNotYetReadyException.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/NMNotYetReadyException.java @@ -18,13 +18,13 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; /** * This exception happens when NM starts from scratch but has not yet connected * with RM. */ -public class NMNotYetReadyException extends YarnException { +public class NMNotYetReadyException extends YarnRuntimeException { private static final long serialVersionUID = 1L; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java index 0fb963a7317..163b2dcaa93 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java @@ -31,7 +31,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.UnsupportedFileSystemException; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.EventHandler; @@ -96,7 +96,7 @@ public class ContainersLauncher extends AbstractService //TODO Is this required? 
FileContext.getLocalFSFileContext(conf); } catch (UnsupportedFileSystemException e) { - throw new YarnException("Failed to start ContainersLauncher", e); + throw new YarnRuntimeException("Failed to start ContainersLauncher", e); } super.init(conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java index 8dce003bb69..706cedd6363 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java @@ -53,7 +53,7 @@ import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.DiskChecker; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.records.LocalResource; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -253,13 +253,13 @@ public class ContainerLocalizer { // ignore response try { nodemanager.heartbeat(status); - } catch (YarnRemoteException e) { } + } catch (YarnException e) { } return; } cs.poll(1000, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { return; - } catch (YarnRemoteException e) { + } catch (YarnException e) { // TODO cleanup return; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java index a44a99db4a9..eb79c50bf44 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java @@ -66,7 +66,7 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.DiskChecker; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.LocalResource; @@ -174,7 +174,7 @@ public class ResourceLocalizationService extends CompositeService try { return FileContext.getLocalFSFileContext(conf); } catch (IOException e) { - throw new YarnException("Failed to access local fs"); + throw new 
YarnRuntimeException("Failed to access local fs"); } } @@ -185,7 +185,7 @@ public class ResourceLocalizationService extends CompositeService if (perDirFileLimit <= 36) { LOG.error(YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY + " parameter is configured with very low value."); - throw new YarnException( + throw new YarnRuntimeException( YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY + " parameter is configured with a value less than 37."); } else { @@ -224,7 +224,7 @@ public class ResourceLocalizationService extends CompositeService lfs.mkdir(new Path(logDir), null, true); } } catch (IOException e) { - throw new YarnException("Failed to initialize LocalizationService", e); + throw new YarnRuntimeException("Failed to initialize LocalizationService", e); } cacheTargetSize = @@ -318,7 +318,7 @@ public class ResourceLocalizationService extends CompositeService ((ApplicationLocalizationEvent)event).getApplication()); break; default: - throw new YarnException("Unknown localization event: " + event); + throw new YarnRuntimeException("Unknown localization event: " + event); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java index 9567b60933c..0170080b566 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java @@ -37,7 +37,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -169,7 +169,7 @@ public class LogAggregationService extends AbstractService implements try { remoteFS = FileSystem.get(conf); } catch (IOException e) { - throw new YarnException("Unable to get Remote FileSystem instance", e); + throw new YarnRuntimeException("Unable to get Remote FileSystem instance", e); } boolean remoteExists = true; try { @@ -184,7 +184,7 @@ public class LogAggregationService extends AbstractService implements } catch (FileNotFoundException e) { remoteExists = false; } catch (IOException e) { - throw new YarnException( + throw new YarnRuntimeException( "Failed to check permissions for dir [" + this.remoteRootLogDir + "]", e); } @@ -198,7 +198,7 @@ public class LogAggregationService extends AbstractService implements remoteFS.mkdirs(qualified, new FsPermission(TLDIR_PERMISSIONS)); remoteFS.setPermission(qualified, new FsPermission(TLDIR_PERMISSIONS)); } catch (IOException e) { - throw new YarnException("Failed to create remoteLogDir [" + throw new YarnRuntimeException("Failed to create remoteLogDir [" + this.remoteRootLogDir + "]", e); } } @@ -279,7 +279,7 @@ public 
class LogAggregationService extends AbstractService implements } }); } catch (Exception e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -293,7 +293,7 @@ public class LogAggregationService extends AbstractService implements initAppAggregator(appId, user, credentials, logRetentionPolicy, appAcls); eventResponse = new ApplicationEvent(appId, ApplicationEventType.APPLICATION_LOG_HANDLING_INITED); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { LOG.warn("Application failed to init aggregation: " + e.getMessage()); eventResponse = new ApplicationEvent(appId, ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED); @@ -319,7 +319,7 @@ public class LogAggregationService extends AbstractService implements getRemoteNodeLogFileForApp(appId, user), logRetentionPolicy, appAcls); if (this.appLogAggregators.putIfAbsent(appId, appLogAggregator) != null) { - throw new YarnException("Duplicate initApp for " + appId); + throw new YarnRuntimeException("Duplicate initApp for " + appId); } // wait until check for existing aggregator to create dirs try { @@ -328,10 +328,10 @@ public class LogAggregationService extends AbstractService implements } catch (Exception e) { appLogAggregators.remove(appId); closeFileSystems(userUgi); - if (!(e instanceof YarnException)) { - e = new YarnException(e); + if (!(e instanceof YarnRuntimeException)) { + e = new YarnRuntimeException(e); } - throw (YarnException)e; + throw (YarnRuntimeException)e; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java index 2e74a3bfd32..b3190464088 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java @@ -23,7 +23,7 @@ import static org.apache.hadoop.yarn.util.StringHelper.pajoin; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; @@ -70,7 +70,7 @@ public class WebServer extends AbstractService { } catch (Exception e) { String msg = "NMWebapps failed to start."; LOG.error(msg, e); - throw new YarnException(msg); + throw new YarnRuntimeException(msg); } super.start(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java index 63b0fd4537a..d83f9b6ac62 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java @@ -29,7 +29,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; @@ -182,14 +182,14 @@ public class DummyContainerManager extends ContainerManagerImpl { protected void authorizeRequest(String containerIDStr, ContainerLaunchContext launchContext, UserGroupInformation remoteUgi, ContainerTokenIdentifier tokenId) - throws YarnRemoteException { + throws YarnException { // do Nothing } @Override protected ContainerTokenIdentifier getContainerTokenIdentifier(UserGroupInformation remoteUgi, - ContainerTokenIdentifier containerTokenId) throws YarnRemoteException { + ContainerTokenIdentifier containerTokenId) throws YarnException { return containerTokenId; } } \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/LocalRMInterface.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/LocalRMInterface.java index 87f8e231bf5..fc0c65a800d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/LocalRMInterface.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/LocalRMInterface.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.api.ResourceTracker; @@ -39,7 +39,7 @@ public class LocalRMInterface implements ResourceTracker { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory.newRecordInstance(RegisterNodeManagerResponse.class); MasterKey masterKey = new MasterKeyPBImpl(); @@ -52,7 +52,7 @@ public class LocalRMInterface implements ResourceTracker { @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeHeartbeatResponse response = recordFactory.newRecordInstance(NodeHeartbeatResponse.class); return response; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java index 4c96d2dbef5..3dbc7aebd6c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java @@ -25,7 +25,7 @@ import java.nio.ByteBuffer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.api.ResourceTracker; @@ -67,7 +67,7 @@ public class MockNodeStatusUpdater extends NodeStatusUpdaterImpl { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory .newRecordInstance(RegisterNodeManagerResponse.class); @@ -81,7 +81,7 @@ public class MockNodeStatusUpdater extends NodeStatusUpdaterImpl { @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeStatus nodeStatus = request.getNodeStatus(); LOG.info("Got heartbeat number " + heartBeatID); nodeStatus.setResponseId(heartBeatID++); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerManagerWithLCE.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerManagerWithLCE.java index 396706df8a1..fe2655d9903 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerManagerWithLCE.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerManagerWithLCE.java @@ -27,7 +27,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.containermanager.TestContainerManager; import org.junit.After; @@ -75,7 +75,7 @@ public class TestContainerManagerWithLCE extends TestContainerManager { @Override public void testContainerSetup() throws IOException, InterruptedException, - YarnRemoteException { + YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. 
Not running the test"); @@ -98,7 +98,7 @@ public class TestContainerManagerWithLCE extends TestContainerManager { @Override public void testContainerLaunchAndStop() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. Not running the test"); @@ -110,7 +110,7 @@ public class TestContainerManagerWithLCE extends TestContainerManager { @Override public void testContainerLaunchAndExitSuccess() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. Not running the test"); @@ -122,7 +122,7 @@ public class TestContainerManagerWithLCE extends TestContainerManager { @Override public void testContainerLaunchAndExitFailure() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. Not running the test"); @@ -134,7 +134,7 @@ public class TestContainerManagerWithLCE extends TestContainerManager { @Override public void testLocalFilesCleanup() throws InterruptedException, - IOException, YarnRemoteException { + IOException, YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. Not running the test"); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java index a772a8640c3..2eaa65c17bf 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java @@ -35,7 +35,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.api.ResourceTracker; @@ -63,7 +63,7 @@ public class TestEventFlow { @Test public void testSuccessfulContainerLaunch() throws InterruptedException, - IOException, YarnRemoteException { + IOException, YarnException { FileContext localFS = FileContext.getLocalFSFileContext(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLocalDirsHandlerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLocalDirsHandlerService.java index fc6fba04517..14764de3292 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLocalDirsHandlerService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLocalDirsHandlerService.java @@ -23,7 +23,7 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.service.Service.STATE; import org.junit.AfterClass; @@ -71,7 +71,7 @@ public class TestLocalDirsHandlerService { try { dirSvc.init(conf); Assert.fail("Service should have thrown an exception due to wrong URI"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { } Assert.assertTrue("Service should not be inited", dirSvc.getServiceState() .compareTo(STATE.NOTINITED) == 0); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManager.java index 98fabe1c5d0..9fc779578c3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManager.java @@ -22,7 +22,7 @@ import static org.junit.Assert.fail; import java.io.IOException; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.junit.Test; @@ -46,7 +46,7 @@ public class TestNodeManager { try { nm.init(conf); fail("Init should fail"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { //PASS assert(e.getCause().getMessage().contains("dummy executor init called")); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java index d0cd6b297fb..3cf4601d92e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java @@ -55,7 +55,7 @@ import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState; import 
org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; @@ -98,7 +98,7 @@ public class TestNodeManagerReboot { @Test(timeout = 2000000) public void testClearLocalDirWhenNodeReboot() throws IOException, - YarnRemoteException, InterruptedException { + YarnException, InterruptedException { nm = new MyNodeManager(); nm.start(); @@ -147,7 +147,7 @@ public class TestNodeManagerReboot { .createRemoteUser(cId.toString()); currentUser.doAs(new PrivilegedExceptionAction() { @Override - public Void run() throws YarnRemoteException, IOException { + public Void run() throws YarnException, IOException { containerManager.startContainer(startRequest); return null; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java index a55026883e2..3765198b0fe 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java @@ -35,7 +35,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl; @@ -82,7 +82,7 @@ public class TestNodeManagerResync { @SuppressWarnings("unchecked") @Test public void testKillContainersOnResync() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { NodeManager nm = new TestNodeManager1(); YarnConfiguration conf = createNMConfig(); nm.init(conf); @@ -110,7 +110,7 @@ public class TestNodeManagerResync { @SuppressWarnings("unchecked") @Test public void testBlockNewContainerRequestsOnStartAndResync() - throws IOException, InterruptedException, YarnRemoteException { + throws IOException, InterruptedException, YarnException { NodeManager nm = new TestNodeManager2(); YarnConfiguration conf = createNMConfig(); nm.init(conf); @@ -166,7 +166,7 @@ public class TestNodeManagerResync { } @Override - protected void registerWithRM() throws YarnRemoteException, IOException { + protected void registerWithRM() throws YarnException, IOException { super.registerWithRM(); registrationCount++; } @@ -288,7 +288,7 @@ public class TestNodeManagerResync { numContainers++; try { getContainerManager().startContainer(startRequest); - } catch (YarnRemoteException e) { + } catch (YarnException e) { numContainersRejected++; Assert.assertTrue(e.getMessage().contains( "Rejecting new containers as NodeManager has not" + diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java index 1d3e38c5bc0..e0db8269195 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java @@ -57,7 +57,7 @@ import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -103,7 +103,7 @@ public class TestNodeManagerShutdown { @Test public void testKillContainersOnShutdown() throws IOException, - YarnRemoteException { + YarnException { NodeManager nm = new TestNodeManager(); nm.init(createNMConfig()); nm.start(); @@ -150,7 +150,7 @@ public class TestNodeManagerShutdown { public static void startContainer(NodeManager nm, ContainerId cId, FileContext localFS, File scriptFileDir, File processStartFile) - throws IOException, YarnRemoteException { + throws IOException, YarnException { File scriptFile = createUnhaltingScriptFile(cId, scriptFileDir, processStartFile); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java index 00ce1c7d18a..8789fa369fd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java @@ -42,7 +42,7 @@ import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.Path; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -54,7 +54,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -134,7 +134,7 @@ public class TestNodeStatusUpdater { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, 
+ RegisterNodeManagerRequest request) throws YarnException, IOException { NodeId nodeId = request.getNodeId(); Resource resource = request.getResource(); @@ -171,7 +171,7 @@ public class TestNodeStatusUpdater { @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeStatus nodeStatus = request.getNodeStatus(); LOG.info("Got heartbeat number " + heartBeatID); NodeManagerMetrics mockMetrics = mock(NodeManagerMetrics.class); @@ -334,7 +334,7 @@ public class TestNodeStatusUpdater { protected ResourceTracker getRMClient() { if(System.currentTimeMillis() - waitStartTime <= rmStartIntervalMS || rmNeverStart) { - throw new YarnException("Faking RM start failure as start " + + throw new YarnRuntimeException("Faking RM start failure as start " + "delay timer has not expired."); } else { return resourceTracker; @@ -407,7 +407,7 @@ public class TestNodeStatusUpdater { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory @@ -419,7 +419,7 @@ public class TestNodeStatusUpdater { } @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeStatus nodeStatus = request.getNodeStatus(); nodeStatus.setResponseId(heartBeatID++); @@ -445,7 +445,7 @@ public class TestNodeStatusUpdater { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = @@ -457,7 +457,7 @@ public class TestNodeStatusUpdater { @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { LOG.info("Got heartBeatId: [" + heartBeatID +"]"); NodeStatus nodeStatus = request.getNodeStatus(); nodeStatus.setResponseId(heartBeatID++); @@ -497,7 +497,7 @@ public class TestNodeStatusUpdater { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory .newRecordInstance(RegisterNodeManagerResponse.class); @@ -508,7 +508,7 @@ public class TestNodeStatusUpdater { @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { try { if (heartBeatID == 0) { Assert.assertEquals(request.getNodeStatus().getContainersStatuses() @@ -537,7 +537,7 @@ public class TestNodeStatusUpdater { .get(4).getState() == ContainerState.RUNNING && request.getNodeStatus().getContainersStatuses().get(4) .getContainerId().getId() == 5); - throw new YarnException("Lost the heartbeat response"); + throw new YarnRuntimeException("Lost the heartbeat response"); } else if (heartBeatID == 2) { Assert.assertEquals(request.getNodeStatus().getContainersStatuses() .size(), 7); @@ -589,7 +589,7 @@ public class TestNodeStatusUpdater { public NodeAction registerNodeAction = NodeAction.NORMAL; @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest 
request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory @@ -602,7 +602,7 @@ public class TestNodeStatusUpdater { @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { heartBeatID++; throw RPCUtil.getRemoteException("NodeHeartbeat exception"); } @@ -646,7 +646,7 @@ public class TestNodeStatusUpdater { nm.start(); } catch (Throwable e) { TestNodeStatusUpdater.this.nmStartError = e; - throw new YarnException(e); + throw new YarnRuntimeException(e); } } }.start(); @@ -765,7 +765,7 @@ public class TestNodeStatusUpdater { return nodeStatusUpdater; } }; - verifyNodeStartFailure("org.apache.hadoop.yarn.YarnException: " + verifyNodeStartFailure("org.apache.hadoop.yarn.YarnRuntimeException: " + "Recieved SHUTDOWN signal from Resourcemanager ," + "Registration of NodeManager failed, " + "Message from ResourceManager: RM Shutting Down Node"); @@ -867,7 +867,7 @@ public class TestNodeStatusUpdater { @Override public void start() { // Simulating failure of starting RPC server - throw new YarnException("Starting of RPC Server failed"); + throw new YarnRuntimeException("Starting of RPC Server failed"); } }; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRPCFactories.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRPCFactories.java index fdb10079fef..39b3337d772 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRPCFactories.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRPCFactories.java @@ -27,7 +27,7 @@ import junit.framework.Assert; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl; import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl; import org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocol; @@ -56,7 +56,7 @@ public class TestRPCFactories { RpcServerFactoryPBImpl.get().getServer( LocalizationProtocol.class, instance, addr, conf, null, 1); server.start(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -87,12 +87,12 @@ public class TestRPCFactories { LocalizationProtocol.class, 1, NetUtils.getConnectAddress(server), conf); Assert.assertNotNull(client); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create client"); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRecordFactory.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRecordFactory.java index 157134ce33d..92d24bb9879 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRecordFactory.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRecordFactory.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.yarn.server.nodemanager; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse; @@ -37,7 +37,7 @@ public class TestRecordFactory { LocalizerHeartbeatResponse.class); Assert.assertEquals(LocalizerHeartbeatResponsePBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java index eae48ab09ca..6aa4ff93164 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java @@ -40,7 +40,7 @@ import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; @@ -183,7 +183,7 @@ public abstract class BaseContainerManagerTest { @Override protected void authorizeRequest(String containerIDStr, ContainerLaunchContext launchContext, UserGroupInformation remoteUgi, - ContainerTokenIdentifier tokenId) throws YarnRemoteException { + ContainerTokenIdentifier tokenId) throws YarnException { // do nothing } }; @@ -212,13 +212,13 @@ public abstract class BaseContainerManagerTest { public static void waitForContainerState(ContainerManager containerManager, ContainerId containerID, ContainerState finalState) - throws InterruptedException, YarnRemoteException, IOException { + throws InterruptedException, YarnException, IOException { waitForContainerState(containerManager, containerID, finalState, 20); } public static void waitForContainerState(ContainerManager containerManager, ContainerId containerID, ContainerState finalState, int timeOutMax) - throws InterruptedException, YarnRemoteException, IOException { + throws 
InterruptedException, YarnException, IOException { GetContainerStatusRequest request = recordFactory.newRecordInstance(GetContainerStatusRequest.class); request.setContainerId(containerID); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java index 519f62efbde..dbda9fb114c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java @@ -50,7 +50,7 @@ import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.URL; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.api.ResourceManagerConstants; import org.apache.hadoop.yarn.server.nodemanager.CMgrCompletedAppsEvent; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode; @@ -94,7 +94,7 @@ public class TestContainerManager extends BaseContainerManagerTest { ContainerId cId = createContainerId(); request.setContainerId(cId); containerManager.getContainerStatus(request); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throwsException = true; } Assert.assertTrue(throwsException); @@ -102,7 +102,7 @@ public class TestContainerManager extends BaseContainerManagerTest { @Test public void testContainerSetup() throws IOException, InterruptedException, - YarnRemoteException { + YarnException { containerManager.start(); @@ -184,7 +184,7 @@ public class TestContainerManager extends BaseContainerManagerTest { @Test public void testContainerLaunchAndStop() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { containerManager.start(); File scriptFile = Shell.appendScriptExtension(tmpDir, "scriptFile"); @@ -287,7 +287,7 @@ public class TestContainerManager extends BaseContainerManagerTest { } private void testContainerLaunchAndExit(int exitCode) throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { File scriptFile = Shell.appendScriptExtension(tmpDir, "scriptFile"); PrintWriter fileWriter = new PrintWriter(scriptFile); @@ -362,7 +362,7 @@ public class TestContainerManager extends BaseContainerManagerTest { @Test public void testContainerLaunchAndExitSuccess() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { containerManager.start(); int exitCode = 0; @@ -373,7 +373,7 @@ public class TestContainerManager extends BaseContainerManagerTest { @Test public void testContainerLaunchAndExitFailure() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { containerManager.start(); int exitCode = 50; @@ -384,7 +384,7 @@ public class TestContainerManager extends BaseContainerManagerTest { @Test public void testLocalFilesCleanup() throws 
InterruptedException, - IOException, YarnRemoteException { + IOException, YarnException { // Real del service delSrvc = new DeletionService(exec); delSrvc.init(conf); @@ -524,7 +524,7 @@ public class TestContainerManager extends BaseContainerManagerTest { boolean catchException = false; try { containerManager.startContainer(startRequest1); - } catch (YarnRemoteException e) { + } catch (YarnException e) { catchException = true; Assert.assertTrue(e.getMessage().contains( "Container " + cId1 + " rejected as it is allocated by a previous RM")); @@ -549,10 +549,10 @@ public class TestContainerManager extends BaseContainerManagerTest { boolean noException = true; try { containerManager.startContainer(startRequest2); - } catch (YarnRemoteException e) { + } catch (YarnException e) { noException = false; } - // Verify that startContainer get no YarnRemoteException + // Verify that startContainer get no YarnException Assert.assertTrue(noException); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java index 38d513603f6..b42ed026e4f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java @@ -58,7 +58,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; @@ -205,7 +205,7 @@ public class TestContainerLocalizer { // verify filesystems are closed when localizer fails localizer = setupContainerLocalizerForTest(); - doThrow(new YarnException("Forced Failure")).when(localizer).localizeFiles( + doThrow(new YarnRuntimeException("Forced Failure")).when(localizer).localizeFiles( any(LocalizationProtocol.class), any(CompletionService.class), any(UserGroupInformation.class)); verify(localizer, never()).closeFileSystems( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java index 057d7cce6fc..cc2f7eea2fc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java @@ -21,7 +21,7 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer; import junit.framework.Assert; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.junit.Test; @@ -80,7 +80,7 @@ public class TestLocalCacheDirectoryManager { e = e1; } Assert.assertNotNull(e); - Assert.assertEquals(YarnException.class, e.getClass()); + Assert.assertEquals(YarnRuntimeException.class, e.getClass()); Assert.assertEquals(e.getMessage(), YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY + " parameter is configured with a value less than 37."); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java index faf8f92c0f7..8728beeb91d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java @@ -85,7 +85,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.DrainDispatcher; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; @@ -1014,7 +1014,7 @@ public class TestResourceLocalizationService { LocalizerStatus status = createLocalizerStatus(localizerId); LocalResourceStatus resourceStatus = new LocalResourceStatusPBImpl(); resourceStatus.setException(YarnServerBuilderUtils - .newSerializedException(new YarnRemoteException("test"))); + .newSerializedException(new YarnException("test"))); resourceStatus.setStatus(ResourceStatusType.FETCH_FAILURE); resourceStatus.setResource(req); status.addResourceStatus(resourceStatus); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java index 7554bbba3dd..36e196008a4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java @@ -59,7 +59,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; @@ -78,7 +78,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.DrainDispatcher; import org.apache.hadoop.yarn.event.Event; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat; @@ -431,7 +431,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { super.dirsHandler)); logAggregationService.init(this.conf); - YarnException e = new YarnException("KABOOM!"); + YarnRuntimeException e = new YarnRuntimeException("KABOOM!"); doThrow(e) .when(logAggregationService).verifyAndCreateRemoteLogDir( any(Configuration.class)); @@ -528,7 +528,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { ApplicationId appId = BuilderUtils.newApplicationId( System.currentTimeMillis(), (int)Math.random()); - doThrow(new YarnException("KABOOM!")) + doThrow(new YarnRuntimeException("KABOOM!")) .when(logAggregationService).initAppAggregator( eq(appId), eq(user), any(Credentials.class), any(ContainerLogsRetentionPolicy.class), anyMap()); @@ -708,7 +708,7 @@ public class TestLogAggregationService extends BaseContainerManagerTest { @Test public void testLogAggregationForRealContainerLaunch() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { this.containerManager.start(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java index 8b518d9f57a..7a8c61af276 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java @@ -56,7 +56,7 @@ import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import 
org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal; @@ -179,7 +179,7 @@ public class TestContainersMonitor extends BaseContainerManagerTest { @Test public void testContainerKillOnMemoryOverflow() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { if (!ProcfsBasedProcessTree.isAvailable()) { return; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java index e1fb6d367a6..e023b3a98b2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java @@ -46,7 +46,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.RefreshSuperUserGroupsConfigur import org.apache.hadoop.yarn.api.protocolrecords.RefreshUserToGroupsMappingsRequest; import org.apache.hadoop.yarn.api.protocolrecords.RefreshUserToGroupsMappingsResponse; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -133,7 +133,7 @@ public class AdminService extends AbstractService implements RMAdminProtocol { super.stop(); } - private UserGroupInformation checkAcls(String method) throws YarnRemoteException { + private UserGroupInformation checkAcls(String method) throws YarnException { UserGroupInformation user; try { user = UserGroupInformation.getCurrentUser(); @@ -168,7 +168,7 @@ public class AdminService extends AbstractService implements RMAdminProtocol { @Override public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request) - throws YarnRemoteException { + throws YarnException { UserGroupInformation user = checkAcls("refreshQueues"); try { scheduler.reinitialize(conf, this.rmContext); @@ -186,7 +186,7 @@ public class AdminService extends AbstractService implements RMAdminProtocol { @Override public RefreshNodesResponse refreshNodes(RefreshNodesRequest request) - throws YarnRemoteException { + throws YarnException { UserGroupInformation user = checkAcls("refreshNodes"); try { this.nodesListManager.refreshNodes(new YarnConfiguration()); @@ -204,7 +204,7 @@ public class AdminService extends AbstractService implements RMAdminProtocol { @Override public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfiguration( RefreshSuperUserGroupsConfigurationRequest request) - throws YarnRemoteException { + throws YarnException { UserGroupInformation user = checkAcls("refreshSuperUserGroupsConfiguration"); ProxyUsers.refreshSuperUserGroupsConfiguration(new Configuration()); @@ -217,7 +217,7 @@ public class AdminService extends AbstractService implements RMAdminProtocol { @Override public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings( - 
RefreshUserToGroupsMappingsRequest request) throws YarnRemoteException { + RefreshUserToGroupsMappingsRequest request) throws YarnException { UserGroupInformation user = checkAcls("refreshUserToGroupsMappings"); Groups.getUserToGroupsMappingService().refresh(); @@ -230,7 +230,7 @@ public class AdminService extends AbstractService implements RMAdminProtocol { @Override public RefreshAdminAclsResponse refreshAdminAcls( - RefreshAdminAclsRequest request) throws YarnRemoteException { + RefreshAdminAclsRequest request) throws YarnException { UserGroupInformation user = checkAcls("refreshAdminAcls"); Configuration conf = new Configuration(); @@ -245,7 +245,7 @@ public class AdminService extends AbstractService implements RMAdminProtocol { @Override public RefreshServiceAclsResponse refreshServiceAcls( - RefreshServiceAclsRequest request) throws YarnRemoteException { + RefreshServiceAclsRequest request) throws YarnException { Configuration conf = new Configuration(); if (!conf.getBoolean( CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java index 8dcff5edc36..df0911ffe4a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java @@ -55,7 +55,7 @@ import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -139,7 +139,7 @@ public class ApplicationMasterService extends AbstractService implements } private void authorizeRequest(ApplicationAttemptId appAttemptID) - throws YarnRemoteException { + throws YarnException { if (!UserGroupInformation.isSecurityEnabled()) { return; @@ -169,7 +169,7 @@ public class ApplicationMasterService extends AbstractService implements @Override public RegisterApplicationMasterResponse registerApplicationMaster( - RegisterApplicationMasterRequest request) throws YarnRemoteException, + RegisterApplicationMasterRequest request) throws YarnException, IOException { ApplicationAttemptId applicationAttemptId = request @@ -219,7 +219,7 @@ public class ApplicationMasterService extends AbstractService implements @Override public FinishApplicationMasterResponse finishApplicationMaster( - FinishApplicationMasterRequest request) throws YarnRemoteException, + FinishApplicationMasterRequest request) throws YarnException, IOException { ApplicationAttemptId applicationAttemptId = request @@ -252,7 +252,7 @@ public class ApplicationMasterService extends AbstractService implements @Override public AllocateResponse allocate(AllocateRequest request) - throws 
YarnRemoteException, IOException { + throws YarnException, IOException { ApplicationAttemptId appAttemptId = request.getApplicationAttemptId(); authorizeRequest(appAttemptId); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java index 9b80fe67a7b..f1459fa7dac 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java @@ -72,7 +72,7 @@ import org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -204,7 +204,7 @@ public class ClientRMService extends AbstractService implements @Override public GetNewApplicationResponse getNewApplication( - GetNewApplicationRequest request) throws YarnRemoteException { + GetNewApplicationRequest request) throws YarnException { GetNewApplicationResponse response = recordFactory .newRecordInstance(GetNewApplicationResponse.class); response.setApplicationId(getNewApplicationId()); @@ -223,7 +223,7 @@ public class ClientRMService extends AbstractService implements */ @Override public GetApplicationReportResponse getApplicationReport( - GetApplicationReportRequest request) throws YarnRemoteException { + GetApplicationReportRequest request) throws YarnException { ApplicationId applicationId = request.getApplicationId(); UserGroupInformation callerUGI; @@ -255,7 +255,7 @@ public class ClientRMService extends AbstractService implements @Override public SubmitApplicationResponse submitApplication( - SubmitApplicationRequest request) throws YarnRemoteException { + SubmitApplicationRequest request) throws YarnException { ApplicationSubmissionContext submissionContext = request .getApplicationSubmissionContext(); ApplicationId applicationId = submissionContext.getApplicationId(); @@ -316,7 +316,7 @@ public class ClientRMService extends AbstractService implements " submitted by user " + user); RMAuditLogger.logSuccess(user, AuditConstants.SUBMIT_APP_REQUEST, "ClientRMService", applicationId); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.info("Exception in submitting application with id " + applicationId.getId(), e); RMAuditLogger.logFailure(user, AuditConstants.SUBMIT_APP_REQUEST, @@ -333,7 +333,7 @@ public class ClientRMService extends AbstractService implements @SuppressWarnings("unchecked") @Override public KillApplicationResponse forceKillApplication( - KillApplicationRequest request) throws YarnRemoteException { + KillApplicationRequest request) throws YarnException { ApplicationId applicationId = request.getApplicationId(); @@ -382,7 +382,7 @@ public class ClientRMService extends AbstractService implements @Override 
public GetClusterMetricsResponse getClusterMetrics( - GetClusterMetricsRequest request) throws YarnRemoteException { + GetClusterMetricsRequest request) throws YarnException { GetClusterMetricsResponse response = recordFactory .newRecordInstance(GetClusterMetricsResponse.class); YarnClusterMetrics ymetrics = recordFactory @@ -394,7 +394,7 @@ public class ClientRMService extends AbstractService implements @Override public GetAllApplicationsResponse getAllApplications( - GetAllApplicationsRequest request) throws YarnRemoteException { + GetAllApplicationsRequest request) throws YarnException { UserGroupInformation callerUGI; try { @@ -419,7 +419,7 @@ public class ClientRMService extends AbstractService implements @Override public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest request) - throws YarnRemoteException { + throws YarnException { GetClusterNodesResponse response = recordFactory.newRecordInstance(GetClusterNodesResponse.class); Collection nodes = this.rmContext.getRMNodes().values(); @@ -433,7 +433,7 @@ public class ClientRMService extends AbstractService implements @Override public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request) - throws YarnRemoteException { + throws YarnException { GetQueueInfoResponse response = recordFactory.newRecordInstance(GetQueueInfoResponse.class); try { @@ -482,7 +482,7 @@ public class ClientRMService extends AbstractService implements @Override public GetQueueUserAclsInfoResponse getQueueUserAcls( - GetQueueUserAclsInfoRequest request) throws YarnRemoteException { + GetQueueUserAclsInfoRequest request) throws YarnException { GetQueueUserAclsInfoResponse response = recordFactory.newRecordInstance(GetQueueUserAclsInfoResponse.class); response.setUserAclsInfoList(scheduler.getQueueUserAclInfo()); @@ -492,7 +492,7 @@ public class ClientRMService extends AbstractService implements @Override public GetDelegationTokenResponse getDelegationToken( - GetDelegationTokenRequest request) throws YarnRemoteException { + GetDelegationTokenRequest request) throws YarnException { try { // Verify that the connection is kerberos authenticated @@ -530,7 +530,7 @@ public class ClientRMService extends AbstractService implements @Override public RenewDelegationTokenResponse renewDelegationToken( - RenewDelegationTokenRequest request) throws YarnRemoteException { + RenewDelegationTokenRequest request) throws YarnException { try { if (!isAllowedDelegationTokenOp()) { throw new IOException( @@ -555,7 +555,7 @@ public class ClientRMService extends AbstractService implements @Override public CancelDelegationTokenResponse cancelDelegationToken( - CancelDelegationTokenRequest request) throws YarnRemoteException { + CancelDelegationTokenRequest request) throws YarnException { try { if (!isAllowedDelegationTokenOp()) { throw new IOException( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java index 41b5881da39..1c502b09fb3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java @@ -29,7 +29,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.HostsFileReader; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; @@ -80,7 +80,7 @@ public class NodesListManager extends AbstractService implements } catch (IOException ioe2) { // Should *never* happen this.hostsReader = null; - throw new YarnException(ioe2); + throw new YarnRuntimeException(ioe2); } } super.init(conf); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java index fc4d7d4f086..28449241bda 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java @@ -34,7 +34,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger.AuditConstants; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; @@ -239,7 +239,7 @@ public class RMAppManager implements EventHandler, @SuppressWarnings("unchecked") protected void submitApplication( ApplicationSubmissionContext submissionContext, long submitTime, - boolean isRecovered, String user) throws YarnRemoteException { + boolean isRecovered, String user) throws YarnException { ApplicationId applicationId = submissionContext.getApplicationId(); // Validation of the ApplicationSubmissionContext needs to be completed diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java index b8208a29518..47f83455b41 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java @@ -34,7 +34,7 @@ import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ShutdownHookManager; import 
org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -297,11 +297,11 @@ public class ResourceManager extends CompositeService implements Recoverable { return (ResourceScheduler) ReflectionUtils.newInstance(schedulerClazz, this.conf); } else { - throw new YarnException("Class: " + schedulerClassName + throw new YarnRuntimeException("Class: " + schedulerClassName + " not instance of " + ResourceScheduler.class.getCanonicalName()); } } catch (ClassNotFoundException e) { - throw new YarnException("Could not instantiate Scheduler: " + throw new YarnRuntimeException("Could not instantiate Scheduler: " + schedulerClassName, e); } } @@ -334,7 +334,7 @@ public class ResourceManager extends CompositeService implements Recoverable { conf.getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS); if (globalMaxAppAttempts <= 0) { - throw new YarnException("Invalid global max attempts configuration" + throw new YarnRuntimeException("Invalid global max attempts configuration" + ", " + YarnConfiguration.RM_AM_MAX_ATTEMPTS + "=" + globalMaxAppAttempts + ", it should be a positive integer."); } @@ -348,7 +348,7 @@ public class ResourceManager extends CompositeService implements Recoverable { YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB); if (minMem <= 0 || minMem > maxMem) { - throw new YarnException("Invalid resource scheduler memory" + throw new YarnRuntimeException("Invalid resource scheduler memory" + " allocation configuration" + ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB + "=" + minMem @@ -366,7 +366,7 @@ public class ResourceManager extends CompositeService implements Recoverable { YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES); if (minVcores <= 0 || minVcores > maxVcores) { - throw new YarnException("Invalid resource scheduler vcores" + throw new YarnRuntimeException("Invalid resource scheduler vcores" + " allocation configuration" + ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES + "=" + minVcores @@ -451,7 +451,7 @@ public class ResourceManager extends CompositeService implements Recoverable { try { this.eventProcessor.join(); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } super.stop(); } @@ -470,7 +470,7 @@ public class ResourceManager extends CompositeService implements Recoverable { } this.eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } } @@ -578,7 +578,7 @@ public class ResourceManager extends CompositeService implements Recoverable { try { doSecureLogin(); } catch(IOException ie) { - throw new YarnException("Failed to login", ie); + throw new YarnRuntimeException("Failed to login", ie); } this.appTokenSecretManager.start(); @@ -603,7 +603,7 @@ public class ResourceManager extends CompositeService implements Recoverable { try { rmDTSecretManager.startThreads(); } catch(IOException ie) { - throw new YarnException("Failed to start secret manager threads", ie); + throw new YarnRuntimeException("Failed to start secret manager threads", ie); } if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) { diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java index 930473cb6c6..25776844b76 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java @@ -28,11 +28,11 @@ import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.Node; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.PolicyProvider; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -110,7 +110,7 @@ public class ResourceTrackerService extends AbstractService implements conf.getLong(YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS, YarnConfiguration.DEFAULT_RM_NM_HEARTBEAT_INTERVAL_MS); if (nextHeartBeatInterval <= 0) { - throw new YarnException("Invalid Configuration. " + throw new YarnRuntimeException("Invalid Configuration. 
" + YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS + " should be larger than 0."); } @@ -161,7 +161,7 @@ public class ResourceTrackerService extends AbstractService implements @SuppressWarnings("unchecked") @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { NodeId nodeId = request.getNodeId(); @@ -230,7 +230,7 @@ public class ResourceTrackerService extends AbstractService implements @SuppressWarnings("unchecked") @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeStatus remoteNodeStatus = request.getNodeStatus(); /** diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java index b95d2aad82d..4ec82e476be 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java @@ -48,7 +48,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -97,7 +97,7 @@ public class AMLauncher implements Runnable { containerMgrProxy = getContainerMgrProxy(masterContainerID); } - private void launch() throws IOException, YarnRemoteException { + private void launch() throws IOException, YarnException { connect(); ContainerId masterContainerID = masterContainer.getId(); ApplicationSubmissionContext applicationContext = @@ -115,7 +115,7 @@ public class AMLauncher implements Runnable { + " for AM " + application.getAppAttemptId()); } - private void cleanup() throws IOException, YarnRemoteException { + private void cleanup() throws IOException, YarnException { connect(); ContainerId containerId = masterContainer.getId(); StopContainerRequest stopRequest = @@ -245,7 +245,7 @@ public class AMLauncher implements Runnable { cleanup(); } catch(IOException ie) { LOG.info("Error cleaning master ", ie); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.info("Error cleaning master ", e); } break; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java index 0f32313b3cf..23ffd37edd0 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java @@ -34,7 +34,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.ExitUtil; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; @@ -389,7 +389,7 @@ public class RMAppImpl implements RMApp, Recoverable { case FAILED: return YarnApplicationState.FAILED; } - throw new YarnException("Unknown state passed!"); + throw new YarnRuntimeException("Unknown state passed!"); } private FinalApplicationStatus createFinalApplicationStatus(RMAppState state) { @@ -408,7 +408,7 @@ public class RMAppImpl implements RMApp, Recoverable { case KILLED: return FinalApplicationStatus.KILLED; } - throw new YarnException("Unknown state passed!"); + throw new YarnRuntimeException("Unknown state passed!"); } @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/InvalidResourceRequestException.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/InvalidResourceRequestException.java index 3d1e7dda432..27628f3703f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/InvalidResourceRequestException.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/InvalidResourceRequestException.java @@ -18,14 +18,14 @@ package org.apache.hadoop.yarn.server.resourcemanager.scheduler; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; /** * The exception is thrown when the requested resource is out of the range * of the configured lower and upper resource boundaries. 
* */ -public class InvalidResourceRequestException extends YarnException { +public class InvalidResourceRequestException extends YarnRuntimeException { public InvalidResourceRequestException(Throwable cause) { super(cause); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java index 7ad05b241a2..af626aad9ed 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java @@ -46,7 +46,7 @@ import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.Task.State; @@ -127,7 +127,7 @@ public class Application { return used; } - public synchronized void submit() throws IOException, YarnRemoteException { + public synchronized void submit() throws IOException, YarnException { ApplicationSubmissionContext context = recordFactory.newRecordInstance(ApplicationSubmissionContext.class); context.setApplicationId(this.applicationId); context.setQueue(this.queue); @@ -201,7 +201,7 @@ public class Application { } public synchronized void finishTask(Task task) throws IOException, - YarnRemoteException { + YarnException { Set tasks = this.tasks.get(task.getPriority()); if (!tasks.remove(task)) { throw new IllegalStateException( @@ -288,7 +288,7 @@ public class Application { } public synchronized void assign(List containers) - throws IOException, YarnRemoteException { + throws IOException, YarnException { int numContainers = containers.size(); // Schedule in priority order @@ -307,12 +307,12 @@ public class Application { assignedContainers + "/" + numContainers); } - public synchronized void schedule() throws IOException, YarnRemoteException { + public synchronized void schedule() throws IOException, YarnException { assign(getResources()); } private synchronized void assign(Priority priority, NodeType type, - List containers) throws IOException, YarnRemoteException { + List containers) throws IOException, YarnException { for (Iterator i=containers.iterator(); i.hasNext();) { Container container = i.next(); String host = container.getNodeId().toString(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java index 08577c87af3..01744999486 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java @@ -44,7 +44,7 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeState; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEvent; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; @@ -206,7 +206,7 @@ public class MockRM extends ResourceManager { public SubmitApplicationResponse run() { try { return client.submitApplication(req); - } catch (YarnRemoteException e) { + } catch (YarnException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java index 6a596cc6f83..3e86aae2b04 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java @@ -46,7 +46,7 @@ import org.apache.hadoop.yarn.api.records.NodeHealthStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -83,7 +83,7 @@ public class NodeManager implements ContainerManager { public NodeManager(String hostName, int containerManagerPort, int httpPort, String rackName, Resource capability, ResourceTrackerService resourceTrackerService, RMContext rmContext) - throws IOException, YarnRemoteException { + throws IOException, YarnException { this.containerManagerAddress = hostName + ":" + containerManagerPort; this.nodeHttpAddress = hostName + ":" + httpPort; this.rackName = rackName; @@ -144,7 +144,7 @@ public class NodeManager implements ContainerManager { } return containerStatuses; } - public void heartbeat() throws IOException, YarnRemoteException { + public void heartbeat() throws IOException, YarnException { NodeStatus nodeStatus = org.apache.hadoop.yarn.server.resourcemanager.NodeManager.createNodeStatus( nodeId, getContainerStatuses(containers)); @@ -160,7 +160,7 @@ public class NodeManager implements ContainerManager { @Override synchronized public StartContainerResponse startContainer( StartContainerRequest request) - throws YarnRemoteException { + throws YarnException { Token containerToken = request.getContainerToken(); ContainerTokenIdentifier tokenId = null; @@ -226,7 +226,7 @@ public class NodeManager implements ContainerManager { @Override 
synchronized public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException { + throws YarnException { ContainerId containerID = request.getContainerId(); String applicationId = String.valueOf( containerID.getApplicationAttemptId().getApplicationId().getId()); @@ -278,7 +278,7 @@ public class NodeManager implements ContainerManager { } @Override - synchronized public GetContainerStatusResponse getContainerStatus(GetContainerStatusRequest request) throws YarnRemoteException { + synchronized public GetContainerStatusResponse getContainerStatus(GetContainerStatusRequest request) throws YarnException { ContainerId containerId = request.getContainerId(); List appContainers = containers.get( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAMAuthorization.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAMAuthorization.java index 815f62346dd..d25418d86c5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAMAuthorization.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAMAuthorization.java @@ -45,7 +45,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.StopContainerResponse; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; @@ -77,21 +77,21 @@ public class TestAMAuthorization { @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException { + throws YarnException { amTokens = request.getContainerLaunchContext().getTokens(); return null; } @Override public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException { + throws YarnException { // TODO Auto-generated method stub return null; } @Override public GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException { + GetContainerStatusRequest request) throws YarnException { // TODO Auto-generated method stub return null; } @@ -255,7 +255,7 @@ public class TestAMAuthorization { try { client.registerApplicationMaster(request); Assert.fail("Should fail with authorization error"); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.assertTrue(e.getMessage().contains( "Unauthorized request from ApplicationMaster. 
" + "Expected ApplicationAttemptID: " diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java index afd951324c3..71c2fe098f6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java @@ -39,7 +39,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources; @@ -170,7 +170,7 @@ public class TestAppManager{ } public void submitApplication( ApplicationSubmissionContext submissionContext, String user) - throws YarnRemoteException { + throws YarnException { super.submitApplication(submissionContext, System.currentTimeMillis(), false, user); } @@ -443,7 +443,7 @@ public class TestAppManager{ try { appMonitor.submitApplication(asContext, "test"); Assert.fail("Exception is expected when applicationId is duplicate."); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.assertTrue("The thrown exception is not the expectd one.", e.getMessage().contains("Cannot add a duplicate!")); } @@ -465,7 +465,7 @@ public class TestAppManager{ appMonitor.submitApplication(asContext, "test"); Assert.fail("Application submission should fail because resource" + " request is invalid."); - } catch (YarnRemoteException e) { + } catch (YarnException e) { // Exception is expected // TODO Change this to assert the expected exception type - post YARN-142 // sub-task related to specialized exceptions. 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java index 1f0c985a43b..ef36858e9ed 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java @@ -46,7 +46,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -321,7 +321,7 @@ public class TestApplicationACLs { try { enemyRmClient.forceKillApplication(finishAppRequest); Assert.fail("App killing by the enemy should fail!!"); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.info("Got exception while killing app as the enemy", e); Assert .assertTrue(e.getMessage().contains( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java index 2fd65ea12ed..9231442a3f6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java @@ -36,7 +36,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerState; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; @@ -68,7 +68,7 @@ public class TestApplicationMasterLauncher { @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException { + throws YarnException { LOG.info("Container started by MyContainerManager: " + request); launched = true; Map env = @@ -97,7 +97,7 @@ public class TestApplicationMasterLauncher { @Override public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException { + throws YarnException { LOG.info("Container cleaned up by MyContainerManager"); cleanedup = true; return null; @@ -105,7 
+105,7 @@ public class TestApplicationMasterLauncher { @Override public GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException { + GetContainerStatusRequest request) throws YarnException { return null; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java index 0a955e34908..d819467a9c2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java @@ -59,7 +59,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.Event; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -147,7 +147,7 @@ public class TestClientRMService { } @Test - public void testGetApplicationReport() throws YarnRemoteException { + public void testGetApplicationReport() throws YarnException { RMContext rmContext = mock(RMContext.class); when(rmContext.getRMApps()).thenReturn( new ConcurrentHashMap()); @@ -209,7 +209,7 @@ public class TestClientRMService { try { checkTokenRenewal(owner, other); return null; - } catch (YarnRemoteException ex) { + } catch (YarnException ex) { Assert.assertTrue(ex.getMessage().contains( "Client " + owner.getUserName() + " tries to renew a token with renewer specified as " + @@ -237,7 +237,7 @@ public class TestClientRMService { } private void checkTokenRenewal(UserGroupInformation owner, - UserGroupInformation renewer) throws IOException, YarnRemoteException { + UserGroupInformation renewer) throws IOException, YarnException { RMDelegationTokenIdentifier tokenIdentifier = new RMDelegationTokenIdentifier( new Text(owner.getUserName()), new Text(renewer.getUserName()), null); @@ -279,7 +279,7 @@ public class TestClientRMService { appId1, null, null); try { rmService.submitApplication(submitRequest1); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.fail("Exception is not expected."); } RMApp app1 = rmContext.getRMApps().get(appId1); @@ -297,7 +297,7 @@ public class TestClientRMService { appId2, name, queue); try { rmService.submitApplication(submitRequest2); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.fail("Exception is not expected."); } RMApp app2 = rmContext.getRMApps().get(appId2); @@ -309,7 +309,7 @@ public class TestClientRMService { try { rmService.submitApplication(submitRequest2); Assert.fail("Exception is expected."); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.assertTrue("The thrown exception is not expected.", e.getMessage().contains("Cannot add a duplicate!")); } @@ -318,7 +318,7 @@ public class TestClientRMService { @Test(timeout=4000) public void testConcurrentAppSubmit() throws 
IOException, InterruptedException, BrokenBarrierException, - YarnRemoteException { + YarnException { YarnScheduler yarnScheduler = mockYarnScheduler(); RMContext rmContext = mock(RMContext.class); mockRMContext(yarnScheduler, rmContext); @@ -368,7 +368,7 @@ public class TestClientRMService { public void run() { try { rmService.submitApplication(submitRequest1); - } catch (YarnRemoteException e) {} + } catch (YarnException e) {} } }; t.start(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java index adffc630d7f..00988939824 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java @@ -58,7 +58,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest; import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; import org.apache.hadoop.yarn.server.resourcemanager.recovery.NullRMStateStore; @@ -134,7 +134,7 @@ public class TestClientRMTokens { clientRMWithDT.getNewApplication(request); } catch (IOException e) { fail("Unexpected exception" + e); - } catch (YarnRemoteException e) { + } catch (YarnException e) { fail("Unexpected exception" + e); } @@ -159,7 +159,7 @@ public class TestClientRMTokens { clientRMWithDT.getNewApplication(request); } catch (IOException e) { fail("Unexpected exception" + e); - } catch (YarnRemoteException e) { + } catch (YarnException e) { fail("Unexpected exception" + e); } @@ -199,7 +199,7 @@ public class TestClientRMTokens { clientRMWithDT.getNewApplication(request); } catch (IOException e) { fail("Unexpected exception" + e); - } catch (YarnRemoteException e) { + } catch (YarnException e) { fail("Unexpected exception" + e); } cancelDelegationToken(loggedInUser, clientRMService, token); @@ -217,7 +217,7 @@ public class TestClientRMTokens { clientRMWithDT.getNewApplication(request); fail("Should not have succeeded with a cancelled delegation token"); } catch (IOException e) { - } catch (YarnRemoteException e) { + } catch (YarnException e) { } @@ -357,7 +357,7 @@ public class TestClientRMTokens { .doAs(new PrivilegedExceptionAction() { @Override public org.apache.hadoop.yarn.api.records.Token run() - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetDelegationTokenRequest request = Records .newRecord(GetDelegationTokenRequest.class); request.setRenewer(renewerString); @@ -374,7 +374,7 @@ public class TestClientRMTokens { throws IOException, InterruptedException { long nextExpTime = loggedInUser.doAs(new PrivilegedExceptionAction() { @Override - public Long run() throws YarnRemoteException, IOException { + public Long run() throws 
YarnException, IOException { RenewDelegationTokenRequest request = Records .newRecord(RenewDelegationTokenRequest.class); request.setDelegationToken(dToken); @@ -391,7 +391,7 @@ public class TestClientRMTokens { throws IOException, InterruptedException { loggedInUser.doAs(new PrivilegedExceptionAction() { @Override - public Void run() throws YarnRemoteException, IOException { + public Void run() throws YarnException, IOException { CancelDelegationTokenRequest request = Records .newRecord(CancelDelegationTokenRequest.class); request.setDelegationToken(dToken); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceManager.java index a4dfbf76562..ffe8c811a54 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceManager.java @@ -29,13 +29,13 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetworkTopology; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.NodeHealthStatus; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.junit.After; @@ -62,7 +62,7 @@ public class TestResourceManager { private org.apache.hadoop.yarn.server.resourcemanager.NodeManager registerNode(String hostName, int containerManagerPort, int httpPort, String rackName, Resource capability) throws IOException, - YarnRemoteException { + YarnException { return new org.apache.hadoop.yarn.server.resourcemanager.NodeManager( hostName, containerManagerPort, httpPort, rackName, capability, resourceManager.getResourceTrackerService(), resourceManager @@ -71,7 +71,7 @@ public class TestResourceManager { // @Test public void testResourceAllocation() throws IOException, - YarnRemoteException { + YarnException { LOG.info("--- START: testResourceAllocation ---"); final int memory = 4 * 1024; @@ -199,7 +199,7 @@ public class TestResourceManager { resourceManager.init(conf); fail("Exception is expected because the global max attempts" + " is negative."); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { // Exception is expected. assertTrue("The thrown exception is not the expected one.", e.getMessage().startsWith( @@ -213,7 +213,7 @@ public class TestResourceManager { resourceManager.init(conf); fail("Exception is expected because the min memory allocation is" + " larger than the max memory allocation."); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { // Exception is expected. 
assertTrue("The thrown exception is not the expected one.", e.getMessage().startsWith( @@ -227,7 +227,7 @@ public class TestResourceManager { resourceManager.init(conf); fail("Exception is expected because the min vcores allocation is" + " larger than the max vcores allocation."); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { // Exception is expected. assertTrue("The thrown exception is not the expected one.", e.getMessage().startsWith( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java index c29d8f3e25c..2a45b2ef2f1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java @@ -28,7 +28,7 @@ import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.Event; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.event.InlineDispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; @@ -88,7 +88,7 @@ public class TestRMNMRPCResponseId { } @Test - public void testRPCResponseId() throws IOException, YarnRemoteException { + public void testRPCResponseId() throws IOException, YarnException { String node = "localhost"; Resource capability = BuilderUtils.newResource(1024, 1); RegisterNodeManagerRequest request = recordFactory.newRecordInstance(RegisterNodeManagerRequest.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java index 6e37df49b3f..29fae23337d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java @@ -39,7 +39,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.resourcemanager.Application; import org.apache.hadoop.yarn.server.resourcemanager.MockNodes; import 
org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl; @@ -101,7 +101,7 @@ public class TestCapacityScheduler { private org.apache.hadoop.yarn.server.resourcemanager.NodeManager registerNode(String hostName, int containerManagerPort, int httpPort, String rackName, Resource capability) - throws IOException, YarnRemoteException { + throws IOException, YarnException { return new org.apache.hadoop.yarn.server.resourcemanager.NodeManager( hostName, containerManagerPort, httpPort, rackName, capability, resourceManager.getResourceTrackerService(), resourceManager diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java index 108c74c2413..4fe344796dd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java @@ -38,7 +38,7 @@ import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.InlineDispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.Application; @@ -87,7 +87,7 @@ public class TestFifoScheduler { private org.apache.hadoop.yarn.server.resourcemanager.NodeManager registerNode(String hostName, int containerManagerPort, int nmHttpPort, String rackName, Resource capability) throws IOException, - YarnRemoteException { + YarnException { return new org.apache.hadoop.yarn.server.resourcemanager.NodeManager( hostName, containerManagerPort, nmHttpPort, rackName, capability, resourceManager.getResourceTrackerService(), resourceManager diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientTokens.java index 11dc7c18cd9..883286d1f37 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientTokens.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientTokens.java @@ -44,7 +44,7 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenInfo; import org.apache.hadoop.security.token.TokenSelector; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import 
org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ContainerManager; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest; @@ -59,7 +59,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.DrainDispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.security.client.ClientToAMTokenSecretManager; import org.apache.hadoop.yarn.security.client.ClientTokenIdentifier; import org.apache.hadoop.yarn.security.client.ClientTokenSelector; @@ -80,7 +80,7 @@ public class TestClientTokens { @SuppressWarnings("unused") public static final long versionID = 1L; - public void ping() throws YarnRemoteException, IOException; + public void ping() throws YarnException, IOException; } private static class CustomSecurityInfo extends SecurityInfo { @@ -123,7 +123,7 @@ public class TestClientTokens { } @Override - public void ping() throws YarnRemoteException, IOException { + public void ping() throws YarnException, IOException { this.pinged = true; } @@ -141,7 +141,7 @@ public class TestClientTokens { .setNumHandlers(1).setSecretManager(secretManager) .setInstance(this).build(); } catch (Exception e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } server.start(); this.address = NetUtils.getConnectAddress(server); @@ -155,7 +155,7 @@ public class TestClientTokens { @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException { + throws YarnException { this.clientTokensSecret = request.getContainerLaunchContext().getEnvironment() .get(ApplicationConstants.APPLICATION_CLIENT_SECRET_ENV_NAME); @@ -164,13 +164,13 @@ public class TestClientTokens { @Override public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException { + throws YarnException { return null; } @Override public GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException { + GetContainerStatusRequest request) throws YarnException { return null; } @@ -283,7 +283,7 @@ public class TestClientTokens { fail("Connection initiation with illegally modified " + "tokens is expected to fail."); return null; - } catch (YarnRemoteException ex) { + } catch (YarnException ex) { fail("Cannot get a YARN remote exception as " + "it will indicate RPC success"); throw ex; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java index 979d1c3dbfe..7d4bbefbbd3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java @@ -31,10 +31,10 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell.ShellCommandExecutor; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; 
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -92,7 +92,7 @@ public class MiniYARNCluster extends CompositeService { new Path(targetWorkDir.getAbsolutePath()), true); } catch (Exception e) { LOG.warn("COULD NOT CLEANUP", e); - throw new YarnException("could not cleanup test dir", e); + throw new YarnRuntimeException("could not cleanup test dir", e); } if (Shell.WINDOWS) { @@ -109,7 +109,7 @@ public class MiniYARNCluster extends CompositeService { try { FileContext.getLocalFSFileContext().delete(new Path(linkPath), true); } catch (IOException e) { - throw new YarnException("could not cleanup symlink: " + linkPath, e); + throw new YarnRuntimeException("could not cleanup symlink: " + linkPath, e); } // Guarantee target exists before creating symlink. @@ -120,7 +120,7 @@ public class MiniYARNCluster extends CompositeService { try { shexec.execute(); } catch (IOException e) { - throw new YarnException(String.format( + throw new YarnRuntimeException(String.format( "failed to create symlink from %s to %s, shell output: %s", linkPath, targetPath, shexec.getOutput()), e); } @@ -216,7 +216,7 @@ public class MiniYARNCluster extends CompositeService { } super.start(); } catch (Throwable t) { - throw new YarnException(t); + throw new YarnRuntimeException(t); } LOG.info("MiniYARN ResourceManager address: " + getConfig().get(YarnConfiguration.RM_ADDRESS)); @@ -321,7 +321,7 @@ public class MiniYARNCluster extends CompositeService { } super.start(); } catch (Throwable t) { - throw new YarnException(t); + throw new YarnRuntimeException(t); } } @@ -357,13 +357,13 @@ public class MiniYARNCluster extends CompositeService { @Override public NodeHeartbeatResponse nodeHeartbeat( - NodeHeartbeatRequest request) throws YarnRemoteException, + NodeHeartbeatRequest request) throws YarnException, IOException { NodeHeartbeatResponse response = recordFactory.newRecordInstance( NodeHeartbeatResponse.class); try { response = rt.nodeHeartbeat(request); - } catch (YarnRemoteException ioe) { + } catch (YarnException ioe) { LOG.info("Exception in heartbeat from node " + request.getNodeStatus().getNodeId(), ioe); throw RPCUtil.getRemoteException(ioe); @@ -374,12 +374,12 @@ public class MiniYARNCluster extends CompositeService { @Override public RegisterNodeManagerResponse registerNodeManager( RegisterNodeManagerRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory. 
newRecordInstance(RegisterNodeManagerResponse.class); try { response = rt.registerNodeManager(request); - } catch (YarnRemoteException ioe) { + } catch (YarnException ioe) { LOG.info("Exception in node registration from " + request.getNodeId().toString(), ioe); throw RPCUtil.getRemoteException(ioe); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java index b56ee86f1d9..ece57b80333 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java @@ -63,7 +63,7 @@ import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -129,7 +129,7 @@ public class TestContainerManagerSecurity { } private void testAuthenticatedUser() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { LOG.info("Running test for authenticated user"); @@ -188,10 +188,10 @@ public class TestContainerManagerSecurity { * * @throws IOException * @throws InterruptedException - * @throws YarnRemoteException + * @throws YarnException */ private void testMaliceUser() throws IOException, InterruptedException, - YarnRemoteException { + YarnException { LOG.info("Running test for malice user"); @@ -286,7 +286,7 @@ public class TestContainerManagerSecurity { client.startContainer(request); fail("Connection initiation with illegally modified " + "tokens is expected to fail."); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.error("Got exception", e); fail("Cannot get a YARN remote exception as " + "it will indicate RPC success"); @@ -306,7 +306,7 @@ public class TestContainerManagerSecurity { } private void testExpiredTokens() throws IOException, InterruptedException, - YarnRemoteException { + YarnException { LOG.info("\n\nRunning test for malice user"); @@ -406,7 +406,7 @@ public class TestContainerManagerSecurity { private AMRMProtocol submitAndRegisterApplication( ResourceManager resourceManager, final YarnRPC yarnRPC, ApplicationId appID) throws IOException, - UnsupportedFileSystemException, YarnRemoteException, + UnsupportedFileSystemException, YarnException, InterruptedException { // Use ping to simulate sleep on Windows. @@ -491,7 +491,7 @@ public class TestContainerManagerSecurity { } private Container requestAndGetContainer(AMRMProtocol scheduler, - ApplicationId appID) throws YarnRemoteException, InterruptedException, + ApplicationId appID) throws YarnException, InterruptedException, IOException { // Request a container allocation. 
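By contrast, purely local failures such as bad configuration use the unchecked YarnRuntimeException (the renamed org.apache.hadoop.yarn.YarnException), as in the ResourceManager and ResourceTrackerService hunks earlier in this patch. A minimal sketch of that pattern follows; the HeartbeatConfigCheck class and validatedHeartbeatInterval helper are illustrative, not part of the patch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.YarnRuntimeException;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;

    public class HeartbeatConfigCheck {
      // Hypothetical validation helper in the style of ResourceTrackerService#init.
      static long validatedHeartbeatInterval(Configuration conf) {
        long interval = conf.getLong(
            YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS,
            YarnConfiguration.DEFAULT_RM_NM_HEARTBEAT_INTERVAL_MS);
        if (interval <= 0) {
          // A misconfigured daemon cannot recover, so throw the unchecked
          // YarnRuntimeException rather than the checked, RPC-facing YarnException.
          throw new YarnRuntimeException("Invalid Configuration. "
              + YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS
              + " should be larger than 0.");
        }
        return interval;
      }
    }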
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/AppReportFetcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/AppReportFetcher.java index 120cf72858c..a5aad90d63d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/AppReportFetcher.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/AppReportFetcher.java @@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -78,11 +78,11 @@ public class AppReportFetcher { * Get a report for the specified app. * @param appId the id of the application to get. * @return the ApplicationReport for that app. - * @throws YarnRemoteException on any error. + * @throws YarnException on any error. * @throws IOException */ public ApplicationReport getApplicationReport(ApplicationId appId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetApplicationReportRequest request = recordFactory .newRecordInstance(GetApplicationReportRequest.class); request.setApplicationId(appId); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java index cec30d1c55b..4a9077b1812 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java @@ -26,7 +26,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.HttpServer; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.service.AbstractService; @@ -69,7 +69,7 @@ public class WebAppProxy extends AbstractService { fetcher = new AppReportFetcher(conf); bindAddress = conf.get(YarnConfiguration.PROXY_ADDRESS); if(bindAddress == null || bindAddress.isEmpty()) { - throw new YarnException(YarnConfiguration.PROXY_ADDRESS + + throw new YarnRuntimeException(YarnConfiguration.PROXY_ADDRESS + " is not set so the proxy will not run."); } LOG.info("Instantiating Proxy at " + bindAddress); @@ -97,7 +97,7 @@ public class WebAppProxy extends AbstractService { proxyServer.start(); } catch (IOException e) { LOG.fatal("Could not start proxy web server",e); - throw new YarnException("Could not 
start proxy web server",e); + throw new YarnRuntimeException("Could not start proxy web server",e); } super.start(); } @@ -109,7 +109,7 @@ public class WebAppProxy extends AbstractService { proxyServer.stop(); } catch (Exception e) { LOG.fatal("Error stopping proxy web server", e); - throw new YarnException("Error stopping proxy web server",e); + throw new YarnRuntimeException("Error stopping proxy web server",e); } } super.stop(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java index c824cfb6c06..b0ba8a17711 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java @@ -26,7 +26,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.util.ShutdownHookManager; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.service.CompositeService; @@ -56,7 +56,7 @@ public class WebAppProxyServer extends CompositeService { try { doSecureLogin(conf); } catch(IOException ie) { - throw new YarnException("Proxy Server Failed to login", ie); + throw new YarnRuntimeException("Proxy Server Failed to login", ie); } proxy = new WebAppProxy(); addService(proxy); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java index a44d084ea28..5fd426c807f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java @@ -50,7 +50,7 @@ import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Apps; import org.apache.hadoop.yarn.util.StringHelper; import org.apache.hadoop.yarn.util.TrackingUriPlugin; @@ -217,7 +217,7 @@ public class WebAppProxyServlet extends HttpServlet { } private ApplicationReport getApplicationReport(ApplicationId id) - throws IOException, YarnRemoteException { + throws IOException, YarnException { return ((AppReportFetcher) getServletContext() .getAttribute(WebAppProxy.FETCHER_ATTRIBUTE)).getApplicationReport(id); } @@ -335,7 +335,7 @@ public class WebAppProxyServlet extends HttpServlet { } catch(URISyntaxException e) { throw new 
IOException(e); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } }
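Finally, the web-proxy hunks keep servlet code on plain IOException by wrapping the checked YarnException at the fetch boundary. Below is a sketch of that wrapping, assuming the AppReportFetcher constructor and getApplicationReport signature shown above; the ProxyFetchExample class and fetchReport helper are illustrative.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.ApplicationReport;
    import org.apache.hadoop.yarn.exceptions.YarnException;
    import org.apache.hadoop.yarn.server.webproxy.AppReportFetcher;

    public class ProxyFetchExample {
      // Hypothetical helper mirroring WebAppProxyServlet#getApplicationReport:
      // servlet-facing code declares only IOException and wraps YarnException.
      static ApplicationReport fetchReport(Configuration conf, ApplicationId id)
          throws IOException {
        AppReportFetcher fetcher = new AppReportFetcher(conf);
        try {
          return fetcher.getApplicationReport(id);
        } catch (YarnException e) {
          throw new IOException(e);
        }
      }
    }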