svn merge -c 1309453 from trunk FIXES: MAPREDUCE-3082. Archive command take wrong path for input file with current directory (John George via bobby)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1309454 13f79535-47bb-0310-9956-ffa450edef68
commit 954327f770
parent 378ed9b673
@@ -149,6 +149,9 @@ Release 0.23.3 - UNRELEASED
     MAPREDUCE-3682 Tracker URL says AM tasks run on localhost.
     (Ravi Prakash via tgraves)
 
+    MAPREDUCE-3082. Archive command take wrong path for input file with current
+    directory (John George via bobby)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -830,11 +830,18 @@ public class HadoopArchives implements Tool {
         throw new IOException("Parent path not specified.");
       }
       parentPath = new Path(args[i+1]);
+      if (!parentPath.isAbsolute()) {
+        parentPath= parentPath.getFileSystem(getConf()).makeQualified(parentPath);
+      }
+
       i+=2;
       //read the rest of the paths
       for (; i < args.length; i++) {
         if (i == (args.length - 1)) {
           destPath = new Path(args[i]);
+          if (!destPath.isAbsolute()) {
+            destPath = destPath.getFileSystem(getConf()).makeQualified(destPath);
+          }
         }
         else {
           Path argPath = new Path(args[i]);
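The core of the fix is the isAbsolute()/makeQualified() check added above: a parent or destination passed as a bare relative name (for example "-p input") was previously used unqualified, which is how the archive command ended up resolving the wrong path relative to the current directory. The standalone sketch below is not part of the patch; the class name QualifyPathDemo and the sample path are made up for illustration. It only shows how makeQualified() turns a relative Path into a fully qualified one against the FileSystem's default URI and working directory, which is the same pattern the patch applies to parentPath and destPath.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class QualifyPathDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // A bare relative name, as a user might pass to "-p" on the command line.
    Path parent = new Path("input");
    System.out.println(parent.isAbsolute());    // false

    // Same pattern as the patch: resolve the path against its FileSystem,
    // which supplies the scheme, authority and current working directory.
    FileSystem fs = parent.getFileSystem(conf);
    Path qualified = fs.makeQualified(parent);
    System.out.println(qualified);              // e.g. file:/home/user/input
    System.out.println(qualified.isAbsolute()); // true
  }
}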
@@ -104,6 +104,41 @@ public class TestHadoopArchives extends TestCase {
   }
 
+  public void testRelativePath() throws Exception {
+    fs.delete(archivePath, true);
+
+    final Path sub1 = new Path(inputPath, "dir1");
+    fs.mkdirs(sub1);
+    createFile(sub1, "a", fs);
+    final Configuration conf = mapred.createJobConf();
+    final FsShell shell = new FsShell(conf);
+
+    final List<String> originalPaths = lsr(shell, "input");
+    System.out.println("originalPath: " + originalPaths);
+    final URI uri = fs.getUri();
+    final String prefix = "har://hdfs-" + uri.getHost() +":" + uri.getPort()
+        + archivePath.toUri().getPath() + Path.SEPARATOR;
+
+    {
+      final String harName = "foo.har";
+      final String[] args = {
+          "-archiveName",
+          harName,
+          "-p",
+          "input",
+          "*",
+          "archive"
+      };
+      System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH, HADOOP_ARCHIVES_JAR);
+      final HadoopArchives har = new HadoopArchives(mapred.createJobConf());
+      assertEquals(0, ToolRunner.run(har, args));
+
+      //compare results
+      final List<String> harPaths = lsr(shell, prefix + harName);
+      assertEquals(originalPaths, harPaths);
+    }
+  }
+
   public void testPathWithSpaces() throws Exception {
     fs.delete(archivePath, true);
 
@@ -170,8 +205,11 @@ public class TestHadoopArchives extends TestCase {
       System.setErr(oldErr);
     }
     System.out.println("lsr results:\n" + results);
-    final String dirname = dir.substring(dir.lastIndexOf(Path.SEPARATOR));
+    String dirname = dir;
+    if (dir.lastIndexOf(Path.SEPARATOR) != -1 ) {
+      dirname = dir.substring(dir.lastIndexOf(Path.SEPARATOR));
+    }
 
     final List<String> paths = new ArrayList<String>();
     for(StringTokenizer t = new StringTokenizer(results, "\n");
         t.hasMoreTokens(); ) {
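The lsr() change above guards the substring call: with a bare relative argument such as the "input" string used by the new testRelativePath, there may be no path separator at all, so lastIndexOf returns -1 and the old substring(-1) call would throw StringIndexOutOfBoundsException. The snippet below is a hypothetical standalone illustration of that behavior, not code from the patch.

public class DirnameGuardDemo {
  public static void main(String[] args) {
    // A bare relative name, like the "input" argument used in testRelativePath.
    String dir = "input";

    int idx = dir.lastIndexOf('/');     // -1: no separator present
    String dirname = dir;
    if (idx != -1) {
      dirname = dir.substring(idx);     // only strip the parent when one exists
    }
    System.out.println(dirname);        // prints "input"

    // Without the guard, dir.substring(-1) would throw
    // StringIndexOutOfBoundsException for such inputs.
  }
}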