HADOOP-6566. Add methods supporting, enforcing narrower permissions on local daemon directories.

Contributed by Arun Murthy and Luke Lu


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@927728 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Christopher Douglas 2010-03-26 08:25:00 +00:00
parent 980f819047
commit f4d31cd029
5 changed files with 349 additions and 1 deletions

View File

@ -204,6 +204,9 @@ Trunk (unreleased changes)
    HADOOP-6646. Move HarfileSystem out of Hadoop Common. (mahadev)

    HADOOP-6566. Add methods supporting, enforcing narrower permissions on
    local daemon directories. (Arun Murthy and Luke Lu via cdouglas)

  OPTIMIZATIONS

    HADOOP-6467. Improve the performance on HarFileSystem.listStatus(..).

View File

@ -90,6 +90,15 @@ public FsPermission(FsPermission other) {
this.otheraction = other.otheraction; this.otheraction = other.otheraction;
} }
/**
 * Construct by given mode, either in octal (e.g. "644") or symbolic
 * (e.g. "u=rwx,go=r") format.
 *
 * NOTE(review): parsing is delegated to {@code UmaskParser}, which
 * implements the umask grammar; this relies on the umask and mode
 * grammars coinciding for these inputs — confirm symbolic modes are
 * parsed as intended.
 *
 * @param mode mode as a string, either in octal or symbolic format
 * @throws IllegalArgumentException if <code>mode</code> is invalid
 */
public FsPermission(String mode) {
  this(new UmaskParser(mode).getUMask());
}
/** Return user {@link FsAction}. */ /** Return user {@link FsAction}. */
public FsAction getUserAction() {return useraction;} public FsAction getUserAction() {return useraction;}
@ -199,7 +208,7 @@ public static FsPermission getUMask(Configuration conf) {
if(conf.deprecatedKeyWasSet(DEPRECATED_UMASK_LABEL)) if(conf.deprecatedKeyWasSet(DEPRECATED_UMASK_LABEL))
umask = Integer.parseInt(confUmask); // Evaluate as decimal value umask = Integer.parseInt(confUmask); // Evaluate as decimal value
else else
umask = new UmaskParser(confUmask).getUMask(); return new FsPermission(confUmask);
} catch(IllegalArgumentException iae) { } catch(IllegalArgumentException iae) {
// Provide more explanation for user-facing message // Provide more explanation for user-facing message
String type = iae instanceof NumberFormatException ? "decimal" String type = iae instanceof NumberFormatException ? "decimal"

View File

@ -21,6 +21,12 @@
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
/**
 * Class that provides utility functions for checking disk problem
 */
@ -68,6 +74,11 @@ public static boolean mkdirsWithExistsCheck(File dir) {
(canonDir.mkdir() || canonDir.exists())); (canonDir.mkdir() || canonDir.exists()));
} }
/**
 * Create the directory if it doesn't exist and validate that it is usable
 * (NOTE(review): the original sentence was truncated after "and" —
 * confirm intended wording).
 * @param dir the directory to create and check
 * @throws DiskErrorException if the directory cannot be created or fails
 *         the checks below
 */
public static void checkDir(File dir) throws DiskErrorException { public static void checkDir(File dir) throws DiskErrorException {
if (!mkdirsWithExistsCheck(dir)) if (!mkdirsWithExistsCheck(dir))
throw new DiskErrorException("can not create directory: " throw new DiskErrorException("can not create directory: "
@ -86,4 +97,70 @@ public static void checkDir(File dir) throws DiskErrorException {
+ dir.toString()); + dir.toString());
} }
/**
 * Create the directory at {@code dir} or, if it already exists, verify and
 * fix its permissions.
 *
 * Unlike the mkdirs method in Sun's java.io.File, the creation step
 * tolerates races: while creating missing parents it re-checks existence
 * whenever a mkdir fails, since another process may have just created the
 * same directory.  Only when both mkdir() and exists() fail for a
 * seemingly non-existent directory is an error signalled; Sun's mkdir
 * would signal an error (return false) if a directory it is attempting to
 * create already exists or the mkdir fails.
 *
 * After a fresh creation — or when the existing directory's permission
 * differs from {@code expected} — the permission is set explicitly.
 *
 * @param localFS local filesystem
 * @param dir directory to be created or checked
 * @param expected expected permission
 * @throws IOException
 */
public static void mkdirsWithExistsAndPermissionCheck(
    LocalFileSystem localFS, Path dir, FsPermission expected)
    throws IOException {
  final File target = localFS.pathToFile(dir);
  // Short-circuit: attempt creation only when the directory is absent.
  final boolean justCreated =
      !target.exists() && mkdirsWithExistsCheck(target);

  if (!justCreated) {
    // Pre-existing directory: leave it alone when permissions already match.
    if (localFS.getFileStatus(dir).getPermission().equals(expected)) {
      return;
    }
  }
  localFS.setPermission(dir, expected);
}
/**
 * Create the local directory if necessary, enforce the expected permission
 * and ensure the directory can be read from, written into and listed.
 *
 * @param localFS local filesystem
 * @param dir directory
 * @param expected permission the directory is required to have
 * @throws DiskErrorException when dir is not a directory or lacks owner
 *         read/write/execute access
 * @throws IOException on any underlying filesystem failure
 */
public static void checkDir(LocalFileSystem localFS, Path dir,
                            FsPermission expected)
    throws DiskErrorException, IOException {
  mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
  final FileStatus status = localFS.getFileStatus(dir);
  final FsPermission perm = status.getPermission();
  if (!status.isDir()) {
    throw new DiskErrorException("not a directory: " + dir.toString());
  }
  // Only the owner's access bits are checked here.
  final FsAction ownerAccess = perm.getUserAction();
  if (!ownerAccess.implies(FsAction.READ)) {
    throw new DiskErrorException("directory is not readable: "
        + dir.toString());
  }
  if (!ownerAccess.implies(FsAction.WRITE)) {
    throw new DiskErrorException("directory is not writable: "
        + dir.toString());
  }
  if (!ownerAccess.implies(FsAction.EXECUTE)) {
    throw new DiskErrorException("directory is not listable: "
        + dir.toString());
  }
}
} }

View File

@ -0,0 +1,132 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.test;
import static org.mockito.Mockito.*;
/**
 * Helper class to create one-liner stubs, so that instead of: <pre>
 * SomeType someDescriptiveMock = mock(SomeType.class);
 * when(someDescriptiveMock.someMethod()).thenReturn(someValue);</pre>
 * <p>You can now do: <pre>
 * SomeType someDescriptiveMock = make(stub(SomeType.class)
 *     .returning(someValue).from.someMethod());</pre>
 */
public class MockitoMaker {

  /**
   * Finish a stub chain and hand back the underlying mock object.
   *
   * Relies on the thread-local {@link StubBuilder} installed by
   * {@link #stub(Class)}: the argument has already recorded the stubbed
   * method call on the mock, so only the return values are wired up here.
   *
   * @param <T> type of mocked object
   * @param methodCall for mocked object
   * @return mocked object
   */
  @SuppressWarnings("unchecked")
  public static <T> T make(Object methodCall) {
    StubBuilder<T> builder = StubBuilder.current();
    when(methodCall).thenReturn(builder.firstReturn, builder.laterReturns);
    return (T) builder.from;
  }

  /**
   * Begin building a stub of the given class.
   *
   * @param <T> type of the target object to be mocked
   * @param target class of the target object to be mocked
   * @return the stub builder of the mocked object
   */
  public static <T> StubBuilder<T> stub(Class<T> target) {
    return new StubBuilder<T>(mock(target));
  }

  /**
   * Builder class for stubs.
   * @param <T> type of the object to be mocked
   */
  public static class StubBuilder<T> {

    /** The target mock object. */
    public final T from;

    // Thread-local so tests running in parallel each see their own
    // in-progress builder.
    @SuppressWarnings("rawtypes")
    private static final ThreadLocal<StubBuilder> tls =
        new ThreadLocal<StubBuilder>() {
          @Override protected StubBuilder initialValue() {
            return new StubBuilder();
          }
        };

    private Object firstReturn = null;
    private Object[] laterReturns = {};

    /** Default constructor for the initial (empty) stub builder. */
    public StubBuilder() {
      this.from = null;
    }

    /**
     * Construct a stub builder wrapping a mock instance and register it
     * as the current builder for this thread.
     *
     * @param mockInstance the mock object
     */
    public StubBuilder(T mockInstance) {
      tls.set(this);
      this.from = mockInstance;
    }

    /**
     * Get the current thread's stub builder.
     *
     * @param <T> type of the mocked object
     * @return the stub builder of the mocked object
     */
    @SuppressWarnings("unchecked")
    public static <T> StubBuilder<T> current() {
      return tls.get();
    }

    /**
     * Record the single return value for the stubbed call.
     *
     * @param value the return value
     * @return this builder, for chaining
     */
    public StubBuilder<T> returning(Object value) {
      firstReturn = value;
      return this;
    }

    /**
     * Record the first and subsequent return values for the stubbed call.
     *
     * @param value the first return value
     * @param values the return values for later invocations
     * @return this builder, for chaining
     */
    public StubBuilder<T> returning(Object value, Object... values) {
      firstReturn = value;
      laterReturns = values;
      return this;
    }
  }
}

View File

@ -0,0 +1,127 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import java.io.*;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.apache.hadoop.test.MockitoMaker.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
/** Unit tests for {@link DiskChecker} using mocked local filesystems. */
public class TestDiskChecker {
  final FsPermission defaultPerm = new FsPermission("755");
  final FsPermission invalidPerm = new FsPermission("000");

  @Test
  public void testMkdirs_dirExists() throws Throwable {
    runMkdirsCase(true, defaultPerm, defaultPerm);
  }

  @Test
  public void testMkdirs_noDir() throws Throwable {
    runMkdirsCase(false, defaultPerm, defaultPerm);
  }

  @Test
  public void testMkdirs_dirExists_badUmask() throws Throwable {
    runMkdirsCase(true, defaultPerm, invalidPerm);
  }

  @Test
  public void testMkdirs_noDir_badUmask() throws Throwable {
    runMkdirsCase(false, defaultPerm, invalidPerm);
  }

  /**
   * Exercise mkdirsWithExistsAndPermissionCheck against a mocked
   * filesystem and verify the create/chmod interactions.
   */
  private void runMkdirsCase(boolean exists, FsPermission before,
                             FsPermission after) throws Throwable {
    File dirFile = make(stub(File.class).returning(exists).from.exists());
    when(dirFile.mkdir()).thenReturn(true);
    Path dir = mock(Path.class); // use default stubs
    LocalFileSystem localFs = make(stub(LocalFileSystem.class)
        .returning(dirFile).from.pathToFile(dir));
    FileStatus status = make(stub(FileStatus.class)
        .returning(after).from.getPermission());
    when(localFs.getFileStatus(dir)).thenReturn(status);
    try {
      DiskChecker.mkdirsWithExistsAndPermissionCheck(localFs, dir, before);
      if (exists) {
        verify(localFs).getFileStatus(dir);
        verify(status).getPermission();
      } else {
        verify(localFs).setPermission(dir, before);
      }
    } catch (DiskErrorException e) {
      if (before != after) {
        assertTrue(e.getMessage().startsWith("Incorrect permission"));
      }
    }
  }

  @Test
  public void testCheckDir_normal() throws Throwable {
    runCheckDirCase(true, new FsPermission("755"), true);
  }

  @Test
  public void testCheckDir_notDir() throws Throwable {
    runCheckDirCase(false, new FsPermission("000"), false);
  }

  @Test
  public void testCheckDir_notReadable() throws Throwable {
    runCheckDirCase(true, new FsPermission("000"), false);
  }

  @Test
  public void testCheckDir_notWritable() throws Throwable {
    runCheckDirCase(true, new FsPermission("444"), false);
  }

  @Test
  public void testCheckDir_notListable() throws Throwable {
    runCheckDirCase(true, new FsPermission("666"), false); // not listable
  }

  /**
   * Exercise checkDir against a mocked filesystem; {@code success} states
   * whether the permission/type combination is expected to pass.
   */
  private void runCheckDirCase(boolean isDir, FsPermission perm,
                               boolean success) throws Throwable {
    File dirFile = make(stub(File.class).returning(true).from.exists());
    when(dirFile.mkdir()).thenReturn(true);
    Path dir = mock(Path.class);
    LocalFileSystem localFs = make(stub(LocalFileSystem.class)
        .returning(dirFile).from.pathToFile(dir));
    FileStatus status = make(stub(FileStatus.class)
        .returning(perm).from.getPermission());
    when(status.isDir()).thenReturn(isDir);
    when(localFs.getFileStatus(dir)).thenReturn(status);
    try {
      DiskChecker.checkDir(localFs, dir, perm);
      verify(status).isDir();
      verify(localFs, times(2)).getFileStatus(dir);
      verify(status, times(2)).getPermission();
      assertTrue("checkDir success", success);
    } catch (DiskErrorException e) {
      assertFalse("checkDir success", success);
      e.printStackTrace();
    }
    System.out.println("checkDir success: " + success);
  }
}