Merge -r 1173450:1173451 from trunk to branch-0.23 to fix MAPREDUCE-3018.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1173452 13f79535-47bb-0310-9956-ffa450edef68
Arun Murthy 2011-09-21 01:11:42 +00:00
parent b5c662f0e6
commit 6c4cdbac09
2 changed files with 19 additions and 8 deletions

hadoop-mapreduce-project/CHANGES.txt

@@ -1338,6 +1338,8 @@ Release 0.23.0 - Unreleased
     YarnClientProtocolProvider and ensured MiniMRYarnCluster sets JobHistory
     configuration for tests. (acmurthy)
 
+    MAPREDUCE-3018. Fixed -file option for streaming. (mahadev via acmurthy)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

hadoop-mapreduce-project/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java

@@ -22,8 +22,10 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.net.URLEncoder;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
@@ -43,6 +45,7 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.FileInputFormat;
@@ -277,19 +280,25 @@ public class StreamJob implements Tool {
     if (values != null && values.length > 0) {
       LOG.warn("-file option is deprecated, please use generic option" +
           " -files instead.");
-      StringBuilder unpackRegex = new StringBuilder(
-        config_.getPattern(MRJobConfig.JAR_UNPACK_PATTERN,
-          JobConf.UNPACK_JAR_PATTERN_DEFAULT).pattern());
+
+      String fileList = null;
       for (String file : values) {
         packageFiles_.add(file);
-        String fname = new File(file).getName();
-        unpackRegex.append("|(?:").append(Pattern.quote(fname)).append(")");
+        try {
+          URI pathURI = new URI(file);
+          Path path = new Path(pathURI);
+          FileSystem localFs = FileSystem.getLocal(config_);
+          String finalPath = path.makeQualified(localFs).toString();
+          fileList = fileList == null ? finalPath : fileList + "," + finalPath;
+        } catch (Exception e) {
+          throw new IllegalArgumentException(e);
+        }
       }
-      config_.setPattern(MRJobConfig.JAR_UNPACK_PATTERN,
-        Pattern.compile(unpackRegex.toString()));
+      config_.set("tmpfiles", config_.get("tmpfiles", "") +
+          (fileList == null ? "" : fileList));
       validate(packageFiles_);
     }
 
     String fsName = cmdLine.getOptionValue("dfs");
     if (null != fsName){
       LOG.warn("-dfs option is deprecated, please use -fs instead.");