MAPREDUCE-3744. Fix the yarn logs command line. Improve error messages for mapred job -logs. (Contributed by Jason Lowe)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1239433 13f79535-47bb-0310-9956-ffa450edef68
Siddharth Seth committed 2012-02-02 01:43:49 +00:00
parent ef1a619a4d
commit 6ba0375b21
4 changed files with 87 additions and 20 deletions
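For context, a minimal sketch (not part of this commit; the class name ExitCodeTool is made up) of the standard Hadoop Tool exit-code idiom this patch brings LogDumper and the mapred job -logs path in line with: run() reports failure through its return value and main() propagates that value to the process exit status. LogDumper wires this up directly in its own main() rather than through ToolRunner.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

// Illustrative only: a Tool that signals failure with a non-zero return code
// instead of merely printing an error message.
public class ExitCodeTool extends Configured implements Tool {
  @Override
  public int run(String[] args) throws Exception {
    return args.length == 0 ? -1 : 0;
  }

  public static void main(String[] args) throws Exception {
    // Propagate the result code to the JVM exit status so shell callers
    // (scripts, "mapred job", "yarn logs") can detect the failure.
    int exitCode = ToolRunner.run(new Configuration(), new ExitCodeTool(), args);
    System.exit(exitCode);
  }
}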

File: CHANGES.txt

@@ -654,6 +654,9 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-3752. Modified application limits to include queue max-capacities
     besides the usual user limits. (Arun C Murthy via vinodkv)
 
+    MAPREDUCE-3744. Fix the yarn logs command line. Improve error messages for
+    mapred job -logs. (Jason Lowe via sseth)
+
 Release 0.23.0 - 2011-11-01
 
   INCOMPATIBLE CHANGES

File: CLI.java

@@ -345,7 +345,7 @@ public class CLI extends Configured implements Tool {
         LogParams logParams = cluster.getLogParams(jobID, taskAttemptID);
         LogDumper logDumper = new LogDumper();
         logDumper.setConf(getConf());
-        logDumper.dumpAContainersLogs(logParams.getApplicationId(),
+        exitCode = logDumper.dumpAContainersLogs(logParams.getApplicationId(),
             logParams.getContainerId(), logParams.getNodeId(),
             logParams.getOwner());
       } catch (IOException e) {

File: LogDumper.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.logaggregation;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.EOFException;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 
 import org.apache.commons.cli.CommandLine;
@@ -103,14 +104,15 @@ public class LogDumper extends Configured implements Tool {
     if (appOwner == null || appOwner.isEmpty()) {
       appOwner = UserGroupInformation.getCurrentUser().getShortUserName();
     }
+    int resultCode = 0;
     if (containerIdStr == null && nodeAddress == null) {
-      dumpAllContainersLogs(appId, appOwner, out);
+      resultCode = dumpAllContainersLogs(appId, appOwner, out);
     } else if ((containerIdStr == null && nodeAddress != null)
         || (containerIdStr != null && nodeAddress == null)) {
       System.out.println("ContainerId or NodeAddress cannot be null!");
       HelpFormatter formatter = new HelpFormatter();
       formatter.printHelp("general options are: ", opts);
-      return -1;
+      resultCode = -1;
     } else {
       Path remoteRootLogDir =
           new Path(getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
@@ -122,27 +124,33 @@ public class LogDumper extends Configured implements Tool {
               appId,
               appOwner,
               ConverterUtils.toNodeId(nodeAddress),
-              getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX,
-                  YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX)));
-      return dumpAContainerLogs(containerIdStr, reader, out);
+              LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf())));
+      resultCode = dumpAContainerLogs(containerIdStr, reader, out);
     }
-    return 0;
+    return resultCode;
   }
 
-  public void dumpAContainersLogs(String appId, String containerId,
+  public int dumpAContainersLogs(String appId, String containerId,
       String nodeId, String jobOwner) throws IOException {
     Path remoteRootLogDir =
         new Path(getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
             YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
     String suffix = LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf());
-    AggregatedLogFormat.LogReader reader =
-        new AggregatedLogFormat.LogReader(getConf(),
-            LogAggregationUtils.getRemoteNodeLogFileForApp(remoteRootLogDir,
-                ConverterUtils.toApplicationId(appId), jobOwner,
-                ConverterUtils.toNodeId(nodeId), suffix));
+    Path logPath = LogAggregationUtils.getRemoteNodeLogFileForApp(
+        remoteRootLogDir, ConverterUtils.toApplicationId(appId), jobOwner,
+        ConverterUtils.toNodeId(nodeId), suffix);
+    AggregatedLogFormat.LogReader reader;
+    try {
+      reader = new AggregatedLogFormat.LogReader(getConf(), logPath);
+    } catch (FileNotFoundException fnfe) {
+      System.out.println("Logs not available at " + logPath.toString());
+      System.out.println(
+          "Log aggregation has not completed or is not enabled.");
+      return -1;
+    }
     DataOutputStream out = new DataOutputStream(System.out);
-    dumpAContainerLogs(containerId, reader, out);
+    return dumpAContainerLogs(containerId, reader, out);
   }
 
   private int dumpAContainerLogs(String containerIdStr,
@@ -174,21 +182,28 @@ public class LogDumper extends Configured implements Tool {
     return 0;
   }
 
-  private void dumpAllContainersLogs(ApplicationId appId, String appOwner,
+  private int dumpAllContainersLogs(ApplicationId appId, String appOwner,
       DataOutputStream out) throws IOException {
     Path remoteRootLogDir =
         new Path(getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
             YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
     String user = appOwner;
     String logDirSuffix =
-        getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
-            YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX);
+        LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf());
     //TODO Change this to get a list of files from the LAS.
     Path remoteAppLogDir =
         LogAggregationUtils.getRemoteAppLogDir(remoteRootLogDir, appId, user,
             logDirSuffix);
-    RemoteIterator<FileStatus> nodeFiles =
-        FileContext.getFileContext().listStatus(remoteAppLogDir);
+    RemoteIterator<FileStatus> nodeFiles;
+    try {
+      nodeFiles = FileContext.getFileContext().listStatus(remoteAppLogDir);
+    } catch (FileNotFoundException fnf) {
+      System.out.println("Logs not available at "
+          + remoteAppLogDir.toString());
+      System.out.println(
+          "Log aggregation has not completed or is not enabled.");
+      return -1;
+    }
     while (nodeFiles.hasNext()) {
       FileStatus thisNodeFile = nodeFiles.next();
       AggregatedLogFormat.LogReader reader =
@@ -217,12 +232,14 @@ public class LogDumper extends Configured implements Tool {
         reader.close();
       }
     }
+    return 0;
   }
 
   public static void main(String[] args) throws Exception {
     Configuration conf = new YarnConfiguration();
     LogDumper logDumper = new LogDumper();
     logDumper.setConf(conf);
-    logDumper.run(args);
+    int exitCode = logDumper.run(args);
+    System.exit(exitCode);
   }
 }
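A hypothetical caller sketch (not from this patch; the class name DumpContainerLogs and all id/host values below are made-up examples) showing how the now int-returning dumpAContainersLogs lets embedding code, such as the mapred job -logs path above, surface a failure instead of exiting 0 when aggregated logs are missing:

import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.logaggregation.LogDumper;

public class DumpContainerLogs {
  public static void main(String[] args) throws Exception {
    LogDumper dumper = new LogDumper();
    dumper.setConf(new YarnConfiguration());
    // Returns -1 (and prints "Logs not available at ...") when the aggregated
    // log file for the node cannot be found.
    int rc = dumper.dumpAContainersLogs(
        "application_1328140000000_0001",          // example application id
        "container_1328140000000_0001_01_000001",  // example container id
        "nodehost:45454",                          // example NM address
        "nobody");                                 // application owner
    System.exit(rc);
  }
}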

File: TestLogDumper.java (new file)

@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.logaggregation;
+
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.junit.Test;
+
+public class TestLogDumper {
+  @Test
+  public void testFailResultCodes() throws Exception {
+    Configuration conf = new YarnConfiguration();
+    conf.setClass("fs.file.impl", LocalFileSystem.class, FileSystem.class);
+    LogDumper dumper = new LogDumper();
+    dumper.setConf(conf);
+
+    // verify dumping a non-existent application's logs returns a failure code
+    int exitCode = dumper.run(new String[] {
+        "-applicationId", "application_0_0" });
+    assertTrue("Should return an error code", exitCode != 0);
+
+    // verify dumping a non-existent container log is a failure code
+    exitCode = dumper.dumpAContainersLogs("application_0_0", "container_0_0",
+        "nonexistentnode:1234", "nobody");
+    assertTrue("Should return an error code", exitCode != 0);
+  }
+}