HADOOP-6906. FileContext copy() utility doesn't work with recursive copying of directories. (vinod k v via mahadev)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@987374 13f79535-47bb-0310-9956-ffa450edef68
parent 0acb205a4b
commit ad5306f24c
CHANGES.txt
@@ -205,6 +205,9 @@ Trunk (unreleased changes)
     HADOOP-6482. GenericOptionsParser constructor that takes Options and
     String[] ignores options. (Eli Collins via jghoman)
 
+    HADOOP-6906. FileContext copy() utility doesn't work with recursive
+    copying of directories. (vinod k v via mahadev)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES
src/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.net.URISyntaxException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
 
@@ -478,4 +479,19 @@ public abstract class ChecksumFs extends FilterFs {
       long inPos, FSDataInputStream sums, long sumsPos) {
     return false;
   }
+
+  @Override
+  protected FileStatus[] listStatus(Path f) throws IOException,
+      UnresolvedLinkException {
+    ArrayList<FileStatus> results = new ArrayList<FileStatus>();
+    FileStatus[] listing = getMyFs().listStatus(f);
+    if (listing != null) {
+      for (int i = 0; i < listing.length; i++) {
+        if (!isChecksumFile(listing[i].getPath())) {
+          results.add(listing[i]);
+        }
+      }
+    }
+    return results.toArray(new FileStatus[results.size()]);
+  }
 }
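Note on the new override: listStatus() now filters ChecksumFs's hidden checksum sidecar files out of directory listings, so callers that walk a directory tree (including the recursive copy path fixed below) never see them as regular entries. A minimal sketch of the filtering idea; the ".<name>.crc" naming rule is the ChecksumFileSystem convention, and looksLikeChecksumFile is an illustrative stand-in for the real isChecksumFile():

    import java.util.ArrayList;
    import java.util.List;

    public class ChecksumFilterSketch {
      // Illustrative stand-in for ChecksumFs.isChecksumFile(): checksum
      // sidecars are hidden files named ".<name>.crc".
      static boolean looksLikeChecksumFile(String name) {
        return name.startsWith(".") && name.endsWith(".crc");
      }

      public static void main(String[] args) {
        List<String> visible = new ArrayList<String>();
        for (String name : new String[] {"file1", ".file1.crc", "sub"}) {
          if (!looksLikeChecksumFile(name)) {
            visible.add(name); // keep only user-visible entries
          }
        }
        System.out.println(visible); // prints [file1, sub]
      }
    }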
src/java/org/apache/hadoop/fs/FileContext.java
@@ -2017,8 +2017,8 @@ public final class FileContext {
       mkdir(qDst, FsPermission.getDefault(), true);
       FileStatus[] contents = listStatus(qSrc);
       for (FileStatus content : contents) {
-        copy(content.getPath(), new Path(qDst, content.getPath()),
-            deleteSource, overwrite);
+        copy(makeQualified(content.getPath()), makeQualified(new Path(qDst,
+            content.getPath().getName())), deleteSource, overwrite);
       }
     } else {
       InputStream in=null;
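This hunk is the heart of the fix. The old code built each child's destination as new Path(qDst, content.getPath()), passing the child's entire path rather than its final component, so recursive copies never targeted dst/<name>. The new code qualifies both endpoints and appends only getName(). A small sketch of the Path composition involved; the namenode host, port, and paths are made up:

    import org.apache.hadoop.fs.Path;

    public class CopyDestinationSketch {
      public static void main(String[] args) {
        Path qDst = new Path("hdfs://namenode:8020/dest");
        // The kind of fully qualified child path listStatus() returns:
        Path child = new Path("hdfs://namenode:8020/src/file1");

        // getName() is just the final path component.
        System.out.println(child.getName());                 // file1
        // Composing with the name targets dst/<name>, as the fix does.
        System.out.println(new Path(qDst, child.getName())); // hdfs://namenode:8020/dest/file1
      }
    }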
@@ -2062,7 +2062,8 @@ public final class FileContext {
         // Recurse to check if dst/srcName exists.
         checkDest(null, new Path(dst, srcName), overwrite);
       } else if (!overwrite) {
-        throw new IOException("Target " + dst + " already exists");
+        throw new IOException("Target " + new Path(dst, srcName)
+            + " already exists");
       }
     } catch (FileNotFoundException e) {
       // dst does not exist - OK to copy.
@@ -2098,8 +2099,9 @@ public final class FileContext {
   private static boolean isSameFS(Path qualPath1, Path qualPath2) {
     URI srcUri = qualPath1.toUri();
     URI dstUri = qualPath2.toUri();
-    return (srcUri.getAuthority().equals(dstUri.getAuthority()) && srcUri
-        .getAuthority().equals(dstUri.getAuthority()));
+    return (srcUri.getScheme().equals(dstUri.getScheme()) &&
+        !(srcUri.getAuthority() != null && dstUri.getAuthority() != null && srcUri
+        .getAuthority().equals(dstUri.getAuthority())));
   }
 
   /**
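The old isSameFS() compared only the URI authorities (and compared them twice, never looking at the scheme); the rewrite brings the scheme into the comparison and null-guards the authorities. For reference, the URI pieces being compared on a qualified Hadoop path look like this; a minimal sketch, with host and port made up:

    import java.net.URI;
    import org.apache.hadoop.fs.Path;

    public class UriPartsSketch {
      public static void main(String[] args) {
        URI u = new Path("hdfs://namenode:8020/user/data").toUri();
        System.out.println(u.getScheme());    // hdfs
        System.out.println(u.getAuthority()); // namenode:8020
        System.out.println(u.getPath());      // /user/data
      }
    }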
@@ -2176,7 +2178,7 @@ public final class FileContext {
       // NB: More than one AbstractFileSystem can match a scheme, eg
       // "file" resolves to LocalFs but could have come by RawLocalFs.
       AbstractFileSystem fs = fc.getFSofPath(p);
-
+
       // Loop until all symlinks are resolved or the limit is reached
       for (boolean isLink = true; isLink;) {
         try {
src/test/core/org/apache/hadoop/fs/FileContextUtilBase.java
@@ -17,15 +17,17 @@
  */
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.fs.FileContextTestHelper.getTestRootPath;
+import static org.apache.hadoop.fs.FileContextTestHelper.readFile;
+import static org.apache.hadoop.fs.FileContextTestHelper.writeFile;
+import static org.junit.Assert.assertTrue;
+
 import java.util.Arrays;
 
 import org.apache.hadoop.util.StringUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import static org.junit.Assert.*;
-
-import static org.apache.hadoop.fs.FileContextTestHelper.*;
 
 /**
  * <p>
@@ -80,4 +82,27 @@ public abstract class FileContextUtilBase {
     assertTrue("Copied files does not match ",Arrays.equals(ts.getBytes(),
         readFile(fc,file2,ts.getBytes().length)));
   }
+
+  @Test
+  public void testRecursiveFcCopy() throws Exception {
+
+    final String ts = "some random text";
+    Path dir1 = getTestRootPath(fc, "dir1");
+    Path dir2 = getTestRootPath(fc, "dir2");
+
+    Path file1 = new Path(dir1, "file1");
+    fc.mkdir(dir1, null, false);
+    writeFile(fc, file1, ts.getBytes());
+    assertTrue(fc.util().exists(file1));
+
+    Path file2 = new Path(dir2, "file1");
+
+    fc.util().copy(dir1, dir2);
+
+    // verify that newly copied file2 exists
+    assertTrue("Failed to copy file2 ", fc.util().exists(file2));
+    // verify that file2 contains test string
+    assertTrue("Copied files does not match ",Arrays.equals(ts.getBytes(),
+        readFile(fc,file2,ts.getBytes().length)));
+  }
 }