YARN-5963. Spelling errors in logging and exceptions for node manager, client, web-proxy, common, and app history code (gsohn via rkanter)

(cherry picked from commit 72fe546841)

Conflicts:
	hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
This commit is contained in:
Robert Kanter 2016-12-07 15:07:25 -08:00
parent 0aaaeea268
commit 9cba7c6d71
12 changed files with 14 additions and 14 deletions

View File

@@ -240,7 +240,7 @@ public class LogsCLI extends Configured implements Tool {
       if (appState == YarnApplicationState.NEW
           || appState == YarnApplicationState.NEW_SAVING
           || appState == YarnApplicationState.SUBMITTED) {
-        System.err.println("Logs are not avaiable right now.");
+        System.err.println("Logs are not available right now.");
         return -1;
       }
     } catch (IOException | YarnException e) {

View File

@@ -379,7 +379,7 @@ public class RMAdminCLI extends HAAdmin {
     }
     if (nodesDecommissioning) {
       System.out.println("Graceful decommissioning not completed in " + timeout
-          + " seconds, issueing forceful decommissioning command.");
+          + " seconds, issuing forceful decommissioning command.");
       RefreshNodesRequest forcefulRequest = RefreshNodesRequest
           .newInstance(DecommissionType.FORCEFUL);
       adminProtocol.refreshNodes(forcefulRequest);

View File

@@ -259,7 +259,7 @@ public class FSDownload implements Callable<Path> {
     if (resource.getVisibility() == LocalResourceVisibility.PUBLIC) {
       if (!isPublic(sourceFs, sCopy, sStat, statCache)) {
         throw new IOException("Resource " + sCopy +
-            " is not publicly accessable and as such cannot be part of the" +
+            " is not publicly accessible and as such cannot be part of the" +
             " public cache.");
       }
     }

View File

@@ -356,7 +356,7 @@ public class RegistrySecurity extends AbstractService {
    * @return the system principals
    */
   public List<ACL> getSystemACLs() {
-    Preconditions.checkNotNull(systemACLs, "registry security is unitialized");
+    Preconditions.checkNotNull(systemACLs, "registry security is uninitialized");
     return Collections.unmodifiableList(systemACLs);
   }

View File

@@ -721,7 +721,7 @@ public class ApplicationHistoryManagerOnTimelineStore extends AbstractService
           app.appReport.getApplicationId())) {
         throw new AuthorizationException("User "
             + UserGroupInformation.getCurrentUser().getShortUserName()
-            + " does not have privilage to see this application "
+            + " does not have privilege to see this application "
             + app.appReport.getApplicationId());
       }
     } finally {

View File

@@ -509,7 +509,7 @@ public class WindowsSecureContainerExecutor extends DefaultContainerExecutor {
             output.append(buf, 0, nRead);
           }
         } catch (Throwable t) {
-          LOG.error("Error occured reading the process stdout", t);
+          LOG.error("Error occurred reading the process stdout", t);
         }
       }
     };
@@ -722,7 +722,7 @@ public class WindowsSecureContainerExecutor extends DefaultContainerExecutor {
       }
       catch(Throwable e) {
         LOG.warn(String.format(
-            "An exception occured during the cleanup of localizer job %s:%n%s",
+            "An exception occurred during the cleanup of localizer job %s:%n%s",
             localizerPid,
             org.apache.hadoop.util.StringUtils.stringifyException(e)));
       }

View File

@@ -146,7 +146,7 @@ public class AuxServices extends AbstractService
       }
       // TODO better use s.getName()?
       if(!sName.equals(s.getName())) {
-        LOG.warn("The Auxilurary Service named '"+sName+"' in the "
+        LOG.warn("The Auxiliary Service named '"+sName+"' in the "
             +"configuration is for "+s.getClass()+" which has "
             +"a name of '"+s.getName()+"'. Because these are "
             +"not the same tools trying to send ServiceData and read "

View File

@@ -1576,7 +1576,7 @@ public class ContainerManagerImpl extends CompositeService implements
     authorizeUser(remoteUgi, nmTokenIdentifier);
     if (!nmTokenIdentifier.getApplicationAttemptId().getApplicationId()
         .equals(containerId.getApplicationAttemptId().getApplicationId())) {
-      throw new YarnException("ApplicationMaster not autorized to perform " +
+      throw new YarnException("ApplicationMaster not authorized to perform " +
           "["+ op + "] on Container [" + containerId + "]!!");
     }
     Container container = context.getContainers().get(containerId);

View File

@@ -1128,7 +1128,7 @@ public class ResourceLocalizationService extends CompositeService
         LOG.error("local path for PRIVATE localization could not be " +
             "found. Disks might have failed.", e);
       } catch (IllegalArgumentException e) {
-        LOG.error("Inorrect path for PRIVATE localization."
+        LOG.error("Incorrect path for PRIVATE localization."
             + next.getResource().getFile(), e);
       } catch (URISyntaxException e) {
         LOG.error(

View File

@@ -494,7 +494,7 @@ public class AppLogAggregatorImpl implements AppLogAggregator {
       doAppLogAggregation();
     } catch (Exception e) {
       // do post clean up of log directories on any exception
-      LOG.error("Error occured while aggregating the log for the application "
+      LOG.error("Error occurred while aggregating the log for the application "
           + appId, e);
       doAppLogAggregationPostCleanUp();
     } finally {

View File

@@ -49,7 +49,7 @@ public class ProxyUriUtils {
   private static String uriEncode(Object o) {
     try {
-      assert (o != null) : "o canot be null";
+      assert (o != null) : "o cannot be null";
       return URLEncoder.encode(o.toString(), "UTF-8");
     } catch (UnsupportedEncodingException e) {
       //This should never happen

View File

@@ -62,7 +62,7 @@ public class WebAppProxy extends AbstractService {
     } else if ("kerberos".equals(auth)) {
       isSecurityEnabled = true;
     } else {
-      LOG.warn("Unrecongized attribute value for " +
+      LOG.warn("Unrecognized attribute value for " +
           CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION +
           " of " + auth);
     }