diff --git a/CHANGES.txt b/CHANGES.txt
index 73eb794433d..155a3a0e182 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -204,6 +204,9 @@ Trunk (unreleased changes)
HADOOP-6646. Move HarfileSystem out of Hadoop Common. (mahadev)
+ HADOOP-6566. Add methods supporting, enforcing narrower permissions on
+ local daemon directories. (Arun Murthy and Luke Lu via cdouglas)
+
OPTIMIZATIONS
HADOOP-6467. Improve the performance on HarFileSystem.listStatus(..).
diff --git a/src/java/org/apache/hadoop/fs/permission/FsPermission.java b/src/java/org/apache/hadoop/fs/permission/FsPermission.java
index 50cf61816a8..572441c5b6b 100644
--- a/src/java/org/apache/hadoop/fs/permission/FsPermission.java
+++ b/src/java/org/apache/hadoop/fs/permission/FsPermission.java
@@ -90,6 +90,15 @@ public class FsPermission implements Writable {
this.otheraction = other.otheraction;
}
+ /**
+ * Construct by the given mode, either in octal or symbolic format.
+ * @param mode mode as a string, either in octal or symbolic format
+ * @throws IllegalArgumentException if mode is invalid
+ */
+ public FsPermission(String mode) {
+ this(new UmaskParser(mode).getUMask());
+ }
+
/** Return user {@link FsAction}. */
public FsAction getUserAction() {return useraction;}
@@ -199,7 +208,7 @@ public class FsPermission implements Writable {
if(conf.deprecatedKeyWasSet(DEPRECATED_UMASK_LABEL))
umask = Integer.parseInt(confUmask); // Evaluate as decimal value
else
- umask = new UmaskParser(confUmask).getUMask();
+ return new FsPermission(confUmask);
} catch(IllegalArgumentException iae) {
// Provide more explanation for user-facing message
String type = iae instanceof NumberFormatException ? "decimal"
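
For illustration only (not part of this patch), a minimal sketch of how the new FsPermission(String) constructor added above might be used; the class name, the example mode strings, and the printed messages are assumptions of the sketch, not code from the patch.

    import org.apache.hadoop.fs.permission.FsPermission;

    public class FsPermissionStringExample {
      public static void main(String[] args) {
        // Octal mode string: owner rwx, no access for group or other.
        FsPermission fromString = new FsPermission("700");
        FsPermission fromShort = new FsPermission((short) 0700);

        // Both forms should describe the same permission bits.
        System.out.println(fromString + " equals " + fromShort + "? "
            + fromString.equals(fromShort));

        // A malformed mode string is rejected, as documented on the new
        // constructor (@throws IllegalArgumentException if mode is invalid).
        try {
          new FsPermission("79x");
        } catch (IllegalArgumentException expected) {
          System.out.println("rejected invalid mode: " + expected.getMessage());
        }
      }
    }
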
diff --git a/src/java/org/apache/hadoop/util/DiskChecker.java b/src/java/org/apache/hadoop/util/DiskChecker.java
index 4c471dbce83..6b8652b7f91 100644
--- a/src/java/org/apache/hadoop/util/DiskChecker.java
+++ b/src/java/org/apache/hadoop/util/DiskChecker.java
@@ -21,6 +21,12 @@ package org.apache.hadoop.util;
import java.io.File;
import java.io.IOException;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+
/**
* Class that provides utility functions for checking disk problem
*/
@@ -68,6 +74,11 @@ public class DiskChecker {
(canonDir.mkdir() || canonDir.exists()));
}
+ /**
+ * Create the directory if it doesn't exist and ensure it can be read from
+ * and written into.
+ *
+ * @param dir directory to be created or checked
+ * @throws DiskErrorException if the directory cannot be created or is not usable
+ */
public static void checkDir(File dir) throws DiskErrorException {
if (!mkdirsWithExistsCheck(dir))
throw new DiskErrorException("can not create directory: "
@@ -86,4 +97,70 @@ public class DiskChecker {
+ dir.toString());
}
+ /**
+ * Create the directory or check permissions if it already exists.
+ *
+ * The semantics of the mkdirsWithExistsAndPermissionCheck method differ from
+ * those of the mkdirs method provided by Sun's java.io.File class in the
+ * following way:
+ * While creating the non-existent parent directories, this method checks for
+ * the existence of those directories if the mkdir fails at any point (since
+ * that directory might have just been created by some other process).
+ * If both mkdir() and the exists() check fail for any seemingly
+ * non-existent directory, then we signal an error; Sun's mkdir would signal
+ * an error (return false) if a directory it is attempting to create already
+ * exists or the mkdir fails.
+ *
+ * @param localFS local filesystem
+ * @param dir directory to be created or checked
+ * @param expected expected permission
+ * @throws IOException
+ */
+ public static void mkdirsWithExistsAndPermissionCheck(
+ LocalFileSystem localFS, Path dir, FsPermission expected)
+ throws IOException {
+ File directory = localFS.pathToFile(dir);
+ boolean created = false;
+
+ if (!directory.exists())
+ created = mkdirsWithExistsCheck(directory);
+
+ if (created || !localFS.getFileStatus(dir).getPermission().equals(expected))
+ localFS.setPermission(dir, expected);
+ }
+
+ /**
+ * Create the local directory if necessary, check permissions and also ensure
+ * it can be read from and written into.
+ *
+ * @param localFS local filesystem
+ * @param dir directory
+ * @param expected permission
+ * @throws DiskErrorException
+ * @throws IOException
+ */
+ public static void checkDir(LocalFileSystem localFS, Path dir,
+ FsPermission expected)
+ throws DiskErrorException, IOException {
+ mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
+
+ FileStatus stat = localFS.getFileStatus(dir);
+ FsPermission actual = stat.getPermission();
+
+ if (!stat.isDir())
+ throw new DiskErrorException("not a directory: "+ dir.toString());
+
+ FsAction user = actual.getUserAction();
+ if (!user.implies(FsAction.READ))
+ throw new DiskErrorException("directory is not readable: "
+ + dir.toString());
+
+ if (!user.implies(FsAction.WRITE))
+ throw new DiskErrorException("directory is not writable: "
+ + dir.toString());
+
+ if (!user.implies(FsAction.EXECUTE))
+ throw new DiskErrorException("directory is not listable: "
+ + dir.toString());
+ }
}
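
For illustration only (not part of this patch), a sketch of how a daemon might call the new DiskChecker.checkDir(LocalFileSystem, Path, FsPermission) overload added above; the directory path, the owner-only "700" permission, and the class name are assumptions of the sketch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.LocalFileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;
    import org.apache.hadoop.util.DiskChecker;
    import org.apache.hadoop.util.DiskChecker.DiskErrorException;

    public class LocalDirSetupExample {
      public static void main(String[] args) throws Exception {
        LocalFileSystem localFS = FileSystem.getLocal(new Configuration());
        Path localDir = new Path("/tmp/hadoop-example/local"); // example path
        FsPermission expected = new FsPermission("700");       // owner-only

        try {
          // Creates the directory if needed, resets its permissions if they
          // do not match `expected`, and verifies it is a readable, writable,
          // listable directory.
          DiskChecker.checkDir(localFS, localDir, expected);
        } catch (DiskErrorException e) {
          System.err.println("Local directory is unusable: " + e.getMessage());
        }
      }
    }
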
diff --git a/src/test/core/org/apache/hadoop/test/MockitoMaker.java b/src/test/core/org/apache/hadoop/test/MockitoMaker.java
new file mode 100644
index 00000000000..28c2011c264
--- /dev/null
+++ b/src/test/core/org/apache/hadoop/test/MockitoMaker.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import static org.mockito.Mockito.*;
+
+/**
+ * Helper class to create one-liner stubs, so that instead of:
+ * SomeType someDescriptiveMock = mock(SomeType.class);
+ * when(someDescriptiveMock.someMethod()).thenReturn(someValue);
+ *
+ * You can now do:
+ * SomeType someDescriptiveMock = make(stub(SomeType.class)
+ *     .returning(someValue).from.someMethod());
+ */
+public class MockitoMaker {
+
+ /**
+ * Create a mock object from a mocked method call.
+ *
+ * @param
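
For illustration only (not part of this patch), a hypothetical JUnit test using the make/stub pattern described in the class-level javadoc above, here stubbing FileStatus.getPermission(); the test class name, the stubbed type, and the assertion are assumptions, and the sketch presumes the StubBuilder API outlined in that javadoc.

    import static org.apache.hadoop.test.MockitoMaker.*;
    import static org.junit.Assert.assertEquals;

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.permission.FsPermission;
    import org.junit.Test;

    public class TestMockitoMakerUsage {
      @Test
      public void stubsAMethodCallInOneLine() {
        FsPermission perm = new FsPermission("700");
        // One-liner equivalent of:
        //   FileStatus stat = mock(FileStatus.class);
        //   when(stat.getPermission()).thenReturn(perm);
        FileStatus stat =
            make(stub(FileStatus.class).returning(perm).from.getPermission());
        assertEquals(perm, stat.getPermission());
      }
    }
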