diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 776b77c38dd..4a142a87675 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -83,17 +83,20 @@ Trunk (unreleased changes)
     kerberos. (jitendra)
 
   BUG FIXES
+    HADOOP-8013. ViewFileSystem does not honor setVerifyChecksum
+    (Daryn Sharp via bobby)
+
     HADOOP-8018. Hudson auto test for HDFS has started throwing javadoc
     (Jon Eagles via bobby)
 
     HADOOP-8001 ChecksumFileSystem's rename doesn't correctly handle checksum
-    files. (Daryn Sharp via bobby)
+    files. (Daryn Sharp via bobby)
 
     HADOOP-8006 TestFSInputChecker is failing in trunk.
-    (Daryn Sharp via bobby)
+    (Daryn Sharp via bobby)
 
     HADOOP-7998. CheckFileSystem does not correctly honor setVerifyChecksum
-    (Daryn Sharp via bobby)
+    (Daryn Sharp via bobby)
 
     HADOOP-7851. Configuration.getClasses() never returns the default value.
     (Uma Maheswara Rao G via amarrk)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index 8df51b3ac82..dde2520041e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -20,6 +20,7 @@
 
 import java.io.*;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.EnumSet;
 import java.util.List;
 
@@ -51,6 +52,7 @@ public class FilterFileSystem extends FileSystem {
 
   protected FileSystem fs;
+  private String swapScheme;
 
   /*
    * so that extending classes can define it
@@ -77,7 +79,11 @@ public FileSystem getRawFileSystem() {
    * @param conf the configuration
    */
   public void initialize(URI name, Configuration conf) throws IOException {
-    fs.initialize(name, conf);
+    super.initialize(name, conf);
+    String scheme = name.getScheme();
+    if (!scheme.equals(fs.getUri().getScheme())) {
+      swapScheme = scheme;
+    }
   }
 
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
@@ -96,7 +102,19 @@ protected URI getCanonicalUri() {
 
   /** Make sure that a path specifies a FileSystem. */
   public Path makeQualified(Path path) {
-    return fs.makeQualified(path);
+    Path fqPath = fs.makeQualified(path);
+    // swap in our scheme if the filtered fs is using a different scheme
+    if (swapScheme != null) {
+      try {
+        // NOTE: should deal with authority, but too much other stuff is broken
+        fqPath = new Path(
+          new URI(swapScheme, fqPath.toUri().getSchemeSpecificPart(), null)
+        );
+      } catch (URISyntaxException e) {
+        throw new IllegalArgumentException(e);
+      }
+    }
+    return fqPath;
   }
 
   ///////////////////////////////////////////////////////////////
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
index 9e6c5d665cf..88ee7b82242 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
@@ -24,6 +24,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
 
 /****************************************************************
  * Implement the FileSystem API for the checksumed local filesystem.
@@ -34,21 +35,26 @@ public class LocalFileSystem extends ChecksumFileSystem {
   static final URI NAME = URI.create("file:///");
   static private Random rand = new Random();
-  FileSystem rfs;
 
   public LocalFileSystem() {
     this(new RawLocalFileSystem());
   }
 
   public FileSystem getRaw() {
-    return rfs;
+    return getRawFileSystem();
   }
 
   public LocalFileSystem(FileSystem rawLocalFileSystem) {
     super(rawLocalFileSystem);
-    rfs = rawLocalFileSystem;
   }
 
+  @Override
+  public void initialize(URI uri, Configuration conf) throws IOException {
+    super.initialize(uri, conf);
+    // ctor didn't initialize the filtered fs
+    getRawFileSystem().initialize(uri, conf);
+  }
+
   /** Convert a path to a File. */
   public File pathToFile(Path path) {
     return ((RawLocalFileSystem)fs).pathToFile(path);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
index 84eb55ce13b..18ec724b7aa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
@@ -19,8 +19,6 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
-import java.net.URISyntaxException;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -82,37 +80,16 @@ protected Path fullPath(final Path path) {
   /**
    * Constructor
-   * @param fs base file system
-   * @param theRoot chRoot for this file system
-   * @throws URISyntaxException
+   * @param uri base file system
+   * @param conf configuration
+   * @throws IOException
    */
-  public ChRootedFileSystem(final FileSystem fs, final Path theRoot)
-    throws URISyntaxException {
-    super(fs);
-    makeQualified(theRoot); //check that root is a valid path for fs
-                            // Would like to call myFs.checkPath(theRoot);
-                            // but not public
-    chRootPathPart = new Path(theRoot.toUri().getPath());
+  public ChRootedFileSystem(final URI uri, Configuration conf)
+      throws IOException {
+    super(FileSystem.get(uri, conf));
+    chRootPathPart = new Path(uri.getPath());
     chRootPathPartString = chRootPathPart.toUri().getPath();
-    try {
-      initialize(fs.getUri(), fs.getConf());
-    } catch (IOException e) { // This exception should not be thrown
-      throw new RuntimeException("This should not occur");
-    }
-
-    /*
-     * We are making URI include the chrootedPath: e.g. file:///chrootedPath.
-     * This is questionable since Path#makeQualified(uri, path) ignores
-     * the pathPart of a uri. Since this class is internal we can ignore
-     * this issue but if we were to make it external then this needs
-     * to be resolved.
-     */
-    // Handle the two cases:
-    //              scheme:/// and scheme://authority/
-    myUri = new URI(fs.getUri().toString() +
-        (fs.getUri().getAuthority() == null ? "" : Path.SEPARATOR) +
-        chRootPathPart.toString().substring(1));
-
+    myUri = uri;
     workingDir = getHomeDirectory();  // We don't use the wd of the myFs
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
index 0778e7f9eff..1addf2fd88b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
@@ -168,8 +168,7 @@ public void initialize(final URI theUri, final Configuration conf)
         protected FileSystem getTargetFileSystem(final URI uri)
           throws URISyntaxException, IOException {
-            return new ChRootedFileSystem(FileSystem.get(uri, config),
-                new Path(uri.getPath()));
+            return new ChRootedFileSystem(uri, config);
         }
 
         @Override
@@ -464,8 +463,11 @@ public void setTimes(final Path f, final long mtime, final long atime)
 
   @Override
   public void setVerifyChecksum(final boolean verifyChecksum) {
-    // This is a file system level operations, however ViewFileSystem
-    // points to many file systems. Noop for ViewFileSystem.
+    List<InodeTree.MountPoint<FileSystem>> mountPoints =
+        fsState.getMountPoints();
+    for (InodeTree.MountPoint<FileSystem> mount : mountPoints) {
+      mount.target.targetFileSystem.setVerifyChecksum(verifyChecksum);
+    }
   }
 
   public MountPoint[] getMountPoints() {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
index b2a9e16038d..05fec95631d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
@@ -72,14 +72,15 @@ public static Path getTestRootPath(FileSystem fSys, String pathString) {
 
   public static String getAbsoluteTestRootDir(FileSystem fSys)
       throws IOException {
-    if (absTestRootDir == null) {
+    // NOTE: can't cache because of different filesystems!
+    //if (absTestRootDir == null)
       if (TEST_ROOT_DIR.startsWith("/")) {
         absTestRootDir = TEST_ROOT_DIR;
       } else {
         absTestRootDir = fSys.getWorkingDirectory().toString() + "/"
             + TEST_ROOT_DIR;
       }
-    }
+    //}
     return absTestRootDir;
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
index d1e9f598e66..c46ab96f375 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
@@ -51,7 +51,7 @@ public void setUp() throws Exception {
 
     // ChRoot to the root of the testDirectory
-    fSys = new ChRootedFileSystem(fSysTarget, chrootedTo);
+    fSys = new ChRootedFileSystem(chrootedTo.toUri(), conf);
   }
 
   @After
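For reviewers, a minimal usage sketch (not part of the patch) of the behavior the ViewFileSystem change enables: setVerifyChecksum now fans out to every mounted target file system instead of being a no-op. The mount-table key and class/file names below (ViewFsChecksumSketch, /local, file:///tmp) are illustrative assumptions, not taken from the patch.

// Hypothetical sketch, assuming a default viewfs mount table with one link
// backed by the checksummed local file system, and that file:///tmp is writable.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsConstants;
import org.apache.hadoop.fs.Path;

public class ViewFsChecksumSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Mount the (checksummed) local file system under /local in the view.
    conf.set("fs.viewfs.mounttable.default.link./local", "file:///tmp");

    FileSystem viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);

    // Write a file through the view; LocalFileSystem also writes a .crc file.
    FSDataOutputStream out = viewFs.create(new Path("/local/example.txt"));
    out.writeUTF("hello");
    out.close();

    // Before HADOOP-8013 this call was silently dropped by ViewFileSystem;
    // it now reaches the LocalFileSystem behind /local, so the read below
    // skips checksum verification.
    viewFs.setVerifyChecksum(false);
    viewFs.open(new Path("/local/example.txt")).close();
  }
}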