diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 7ac531ff0fa..07bec7bac50 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -175,6 +175,9 @@ Release 0.23.1 - Unreleased
 
     MAPREDUCE-3464. mapreduce jsp pages missing DOCTYPE. (Dave Vronay via mattf)
 
+    MAPREDUCE-3265. Removed debug logs during job submission to LOG.debug to
+    cut down noise. (acmurthy)
+
 Release 0.23.0 - 2011-11-01
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
index 597b2edaa39..9cd2a2c05bd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
@@ -72,7 +72,7 @@ public class TaskLog {
     if (!LOG_DIR.exists()) {
       boolean b = LOG_DIR.mkdirs();
       if (!b) {
-        LOG.warn("mkdirs failed. Ignoring.");
+        LOG.debug("mkdirs failed. Ignoring.");
       }
     }
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
index 4828ebacaa0..eb838fe8a7a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
@@ -108,7 +108,7 @@ public class Cluster {
           break;
         }
         else {
-          LOG.info("Cannot pick " + provider.getClass().getName()
+          LOG.debug("Cannot pick " + provider.getClass().getName()
               + " as the ClientProtocolProvider - returned null protocol");
         }
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
index 24c13e745df..c5ba59b36fc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java
@@ -30,12 +30,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
-import org.apache.hadoop.mapreduce.v2.security.client.ClientHSSecurityInfo;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.YarnException;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 
 public class ClientCache {
@@ -79,9 +76,9 @@ public class ClientCache {
     if (StringUtils.isEmpty(serviceAddr)) {
       return null;
     }
-    LOG.info("Connecting to HistoryServer at: " + serviceAddr);
+    LOG.debug("Connecting to HistoryServer at: " + serviceAddr);
     final YarnRPC rpc = YarnRPC.create(conf);
-    LOG.info("Connected to HistoryServer at: " + serviceAddr);
+    LOG.debug("Connected to HistoryServer at: " + serviceAddr);
     UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
     return currentUser.doAs(new PrivilegedAction() {
       @Override
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
index 0bed43d71c6..99b7e8826e7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
@@ -143,7 +143,7 @@ public class ClientServiceDelegate {
         || YarnApplicationState.RUNNING == application
             .getYarnApplicationState()) {
       if (application == null) {
-        LOG.info("Could not get Job info from RM for job " + jobId
+        LOG.debug("Could not get Job info from RM for job " + jobId
             + ". Redirecting to job history server.");
         return checkAndGetHSProxy(null, JobState.NEW);
       }
@@ -169,8 +169,8 @@ public class ClientServiceDelegate {
               + ":" + addr.getPort()));
         UserGroupInformation.getCurrentUser().addToken(clientToken);
       }
-      LOG.info("Tracking Url of JOB is " + application.getTrackingUrl());
-      LOG.info("Connecting to " + serviceAddr);
+      LOG.info("The url to track the job: " + application.getTrackingUrl());
+      LOG.debug("Connecting to " + serviceAddr);
       realProxy = instantiateAMProxy(serviceAddr);
       return realProxy;
     } catch (IOException e) {
@@ -187,7 +187,7 @@ public class ClientServiceDelegate {
       }
       application = rm.getApplicationReport(appId);
       if (application == null) {
-        LOG.info("Could not get Job info from RM for job " + jobId
+        LOG.debug("Could not get Job info from RM for job " + jobId
             + ". Redirecting to job history server.");
         return checkAndGetHSProxy(null, JobState.RUNNING);
       }
@@ -281,16 +281,13 @@ public class ClientServiceDelegate {
           LOG.debug("Tracing remote error ", e.getTargetException());
           throw (YarnRemoteException) e.getTargetException();
         }
-        LOG.info("Failed to contact AM/History for job " + jobId +
-            " retrying..");
-        LOG.debug("Failed exception on AM/History contact",
-            e.getTargetException());
+        LOG.debug("Failed to contact AM/History for job " + jobId +
+            " retrying..", e.getTargetException());
         // Force reconnection by setting the proxy to null.
         realProxy = null;
       } catch (Exception e) {
-        LOG.info("Failed to contact AM/History for job " + jobId
-            + " Will retry..");
-        LOG.debug("Failing to contact application master", e);
+        LOG.debug("Failed to contact AM/History for job " + jobId
+            + " Will retry..", e);
         // Force reconnection by setting the proxy to null.
         realProxy = null;
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
index bef2154dcfe..1645ae88ebc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
@@ -25,7 +25,6 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
@@ -40,11 +39,9 @@ import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.yarn.api.ClientRMProtocol;
-import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
@@ -56,6 +53,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
@@ -67,13 +65,13 @@ import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo;
 
 
 // TODO: This should be part of something like yarn-client.
 public class ResourceMgrDelegate {
   private static final Log LOG = LogFactory.getLog(ResourceMgrDelegate.class);
 
+  private final String rmAddress;
   private YarnConfiguration conf;
   ClientRMProtocol applicationsManager;
   private ApplicationId applicationId;
@@ -92,21 +90,25 @@ public class ResourceMgrDelegate {
             YarnConfiguration.DEFAULT_RM_ADDRESS),
         YarnConfiguration.DEFAULT_RM_PORT,
         YarnConfiguration.RM_ADDRESS);
-    LOG.info("Connecting to ResourceManager at " + rmAddress);
+    this.rmAddress = rmAddress.toString();
+    LOG.debug("Connecting to ResourceManager at " + rmAddress);
     applicationsManager =
         (ClientRMProtocol) rpc.getProxy(ClientRMProtocol.class,
             rmAddress, this.conf);
-    LOG.info("Connected to ResourceManager at " + rmAddress);
+    LOG.debug("Connected to ResourceManager at " + rmAddress);
   }
 
   /**
    * Used for injecting applicationsManager, mostly for testing.
    * @param conf the configuration object
-   * @param applicationsManager the handle to talk the resource managers {@link ClientRMProtocol}.
+   * @param applicationsManager the handle to talk the resource managers
+   *                            {@link ClientRMProtocol}.
    */
-  public ResourceMgrDelegate(YarnConfiguration conf, ClientRMProtocol applicationsManager) {
+  public ResourceMgrDelegate(YarnConfiguration conf,
+      ClientRMProtocol applicationsManager) {
     this.conf = conf;
     this.applicationsManager = applicationsManager;
+    this.rmAddress = applicationsManager.toString();
   }
 
   public void cancelDelegationToken(Token arg0)
@@ -295,18 +297,22 @@ public class ResourceMgrDelegate {
   }
 
 
-  public ApplicationId submitApplication(ApplicationSubmissionContext appContext)
+  public ApplicationId submitApplication(
+      ApplicationSubmissionContext appContext)
   throws IOException {
     appContext.setApplicationId(applicationId);
-    SubmitApplicationRequest request = recordFactory.newRecordInstance(SubmitApplicationRequest.class);
+    SubmitApplicationRequest request =
+        recordFactory.newRecordInstance(SubmitApplicationRequest.class);
     request.setApplicationSubmissionContext(appContext);
     applicationsManager.submitApplication(request);
-    LOG.info("Submitted application " + applicationId + " to ResourceManager");
+    LOG.info("Submitted application " + applicationId + " to ResourceManager"
+        + " at " + rmAddress);
     return applicationId;
   }
 
   public void killApplication(ApplicationId applicationId) throws IOException {
-    KillApplicationRequest request = recordFactory.newRecordInstance(KillApplicationRequest.class);
+    KillApplicationRequest request =
+        recordFactory.newRecordInstance(KillApplicationRequest.class);
     request.setApplicationId(applicationId);
     applicationsManager.forceKillApplication(request);
     LOG.info("Killing application " + applicationId);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
index f77b6e86742..1762f6bb9c7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
@@ -276,7 +276,7 @@ public class YARNRunner implements ClientProtocol {
     Resource capability = recordFactory.newRecordInstance(Resource.class);
     capability.setMemory(conf.getInt(MRJobConfig.MR_AM_VMEM_MB,
         MRJobConfig.DEFAULT_MR_AM_VMEM_MB));
-    LOG.info("AppMaster capability = " + capability);
+    LOG.debug("AppMaster capability = " + capability);
 
     // Setup LocalResources
     Map localResources =
@@ -352,7 +352,7 @@ public class YARNRunner implements ClientProtocol {
     }
     vargsFinal.add(mergedCommand.toString());
 
-    LOG.info("Command to launch container for ApplicationMaster is : "
+    LOG.debug("Command to launch container for ApplicationMaster is : "
         + mergedCommand);
 
     // Setup the CLASSPATH in environment
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java
index ba1dc2ff6bf..1e3ca272967 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java
@@ -37,12 +37,12 @@ import org.apache.hadoop.yarn.factory.providers.RpcFactoryProvider;
  */
 public class HadoopYarnProtoRPC extends YarnRPC {
 
-  private static final Log LOG = LogFactory.getLog(HadoopYarnRPC.class);
+  private static final Log LOG = LogFactory.getLog(HadoopYarnProtoRPC.class);
 
   @Override
   public Object getProxy(Class protocol, InetSocketAddress addr,
       Configuration conf) {
-    LOG.info("Creating a HadoopYarnProtoRpc proxy for protocol " + protocol);
+    LOG.debug("Creating a HadoopYarnProtoRpc proxy for protocol " + protocol);
     return RpcFactoryProvider.getClientFactory(conf).getClient(protocol, 1,
         addr, conf);
   }
@@ -57,11 +57,11 @@ public class HadoopYarnProtoRPC extends YarnRPC {
       InetSocketAddress addr, Configuration conf,
       SecretManager secretManager,
       int numHandlers) {
-    LOG.info("Creating a HadoopYarnProtoRpc server for protocol " + protocol +
+    LOG.debug("Creating a HadoopYarnProtoRpc server for protocol " + protocol +
         " with " + numHandlers + " handlers");
-    return RpcFactoryProvider.getServerFactory(conf).getServer(protocol, instance,
-        addr, conf, secretManager, numHandlers);
+    return RpcFactoryProvider.getServerFactory(conf).getServer(protocol,
+        instance, addr, conf, secretManager, numHandlers);
   }
 
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnRPC.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnRPC.java
index 838693a8f46..3ad757da574 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnRPC.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnRPC.java
@@ -45,7 +45,7 @@ public class HadoopYarnRPC extends YarnRPC {
   @Override
   public Object getProxy(Class protocol, InetSocketAddress addr,
       Configuration conf) {
-    LOG.info("Creating a HadoopYarnRpc proxy for protocol " + protocol);
+    LOG.debug("Creating a HadoopYarnRpc proxy for protocol " + protocol);
     RPC.setProtocolEngine(conf, protocol, AvroSpecificRpcEngine.class);
     try {
       return RPC.getProxy(protocol, 1, addr, conf);
@@ -64,7 +64,7 @@ public class HadoopYarnRPC extends YarnRPC {
       InetSocketAddress addr, Configuration conf,
       SecretManager secretManager,
       int numHandlers) {
-    LOG.info("Creating a HadoopYarnRpc server for protocol " + protocol +
+    LOG.debug("Creating a HadoopYarnRpc server for protocol " + protocol +
         " with " + numHandlers + " handlers");
     RPC.setProtocolEngine(conf, protocol, AvroSpecificRpcEngine.class);
     final RPC.Server hadoopServer;
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java
index e4e61ddfaa7..1434326e5f5 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java
@@ -46,7 +46,8 @@ public abstract class YarnRPC {
       int numHandlers);
 
   public static YarnRPC create(Configuration conf) {
-    LOG.info("Creating YarnRPC for " + conf.get(YarnConfiguration.IPC_RPC_IMPL));
+    LOG.debug("Creating YarnRPC for " +
+        conf.get(YarnConfiguration.IPC_RPC_IMPL));
     String clazzName = conf.get(YarnConfiguration.IPC_RPC_IMPL);
     if (clazzName == null) {
       clazzName = YarnConfiguration.DEFAULT_IPC_RPC_IMPL;
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ApplicationTokenSelector.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ApplicationTokenSelector.java
index 083d9ad6fce..5900da31ea4 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ApplicationTokenSelector.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ApplicationTokenSelector.java
@@ -39,9 +39,9 @@ public class ApplicationTokenSelector implements
     if (service == null) {
       return null;
     }
-    LOG.info("Looking for a token with service " + service.toString());
+    LOG.debug("Looking for a token with service " + service.toString());
     for (Token token : tokens) {
-      LOG.info("Token kind is " + token.getKind().toString()
+      LOG.debug("Token kind is " + token.getKind().toString()
           + " and the token's service name is " + token.getService());
       if (ApplicationTokenIdentifier.KIND_NAME.equals(token.getKind())
           && service.equals(token.getService())) {
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientTokenSelector.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientTokenSelector.java
index acbed1b11c8..07ecba06a6a 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientTokenSelector.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientTokenSelector.java
@@ -39,9 +39,9 @@ public class ClientTokenSelector implements
     if (service == null) {
       return null;
     }
-    LOG.info("Looking for a token with service " + service.toString());
+    LOG.debug("Looking for a token with service " + service.toString());
     for (Token token : tokens) {
-      LOG.info("Token kind is " + token.getKind().toString()
+      LOG.debug("Token kind is " + token.getKind().toString()
          + " and the token's service name is " + token.getService());
      if (ClientTokenIdentifier.KIND_NAME.equals(token.getKind())
          && service.equals(token.getService())) {
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CSQueueUtils.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CSQueueUtils.java
index bd9e7615b6d..13491208dde 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CSQueueUtils.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CSQueueUtils.java
@@ -1,3 +1,20 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
 
 class CSQueueUtils {
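
Side note on the INFO-to-DEBUG conversions above (not part of the patch): with commons-logging, the concatenated message string is still built even when DEBUG is disabled, so callers on hot paths usually guard the call with isDebugEnabled(); the patch also relies on the two-argument debug(message, throwable) form to fold the old INFO message and DEBUG stack trace into one statement. A minimal sketch of both idioms follows; the class and method names are hypothetical, chosen only for illustration.

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class SubmissionLogging {
      private static final Log LOG = LogFactory.getLog(SubmissionLogging.class);

      void connect(String serviceAddr) {
        // Guard the call so the message is only concatenated when DEBUG is on.
        if (LOG.isDebugEnabled()) {
          LOG.debug("Connecting to HistoryServer at: " + serviceAddr);
        }
      }

      void onContactFailure(String jobId, Exception e) {
        // Two-argument form: one call carries both the message and the cause.
        LOG.debug("Failed to contact AM/History for job " + jobId + ". Will retry..", e);
      }
    }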