HDFS-10832. Propagate ACL bit and isEncrypted bit in HttpFS FileStatus permissions.

(cherry picked from commit cba973f036)
(cherry picked from commit f448ce2a89)
Authored by Andrew Wang on 2016-09-09 12:12:29 -07:00; committed by Xiao Chen
parent 911ae15f69
commit 8c24388af0
4 changed files with 229 additions and 230 deletions
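
What the patch does, end to end: the HttpFS server now emits the optional aclBit and encBit flags in its FILESTATUS/LISTSTATUS JSON (the new ENC_BIT_JSON constant below pairs with the existing ACL_BIT_JSON), and the HttpFS client folds those flags back into the permission it returns, so ACL and encryption-zone status survive the HTTP round trip instead of being silently dropped. As a rough, self-contained sketch of the client-side decoding only (the sample response string is illustrative, not captured from a server; the outer "FileStatus"/"permission" key names follow the usual WebHDFS JSON convention rather than anything shown in this diff; json-simple is the parser the HttpFS client itself uses):

    import org.json.simple.JSONObject;
    import org.json.simple.parser.JSONParser;

    public class PermissionBitsSketch {
      public static void main(String[] args) throws Exception {
        String sample =
            "{\"FileStatus\":{\"permission\":\"644\",\"aclBit\":true,\"encBit\":true}}";
        JSONObject status = (JSONObject)
            ((JSONObject) new JSONParser().parse(sample)).get("FileStatus");
        short mode = Short.parseShort((String) status.get("permission"), 8);
        Boolean aclBit = (Boolean) status.get("aclBit");   // null when the server omits it
        Boolean encBit = (Boolean) status.get("encBit");   // null when the server omits it
        boolean hasAcl = aclBit != null && aclBit;
        boolean isEncrypted = encBit != null && encBit;
        System.out.printf("mode=%o hasAcl=%b isEncrypted=%b%n", mode, hasAcl, isEncrypted);
      }
    }

Absent flags decode to false, which is what keeps the new client compatible with servers that predate this change.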

HttpFSFileSystem.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
+import org.apache.hadoop.hdfs.protocol.FsPermissionExtension;
 import org.apache.hadoop.lib.wsrs.EnumSetParam;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -182,6 +183,8 @@ public class HttpFSFileSystem extends FileSystem
   public static final String ACL_ENTRIES_JSON = "entries";
   public static final String ACL_BIT_JSON = "aclBit";
+  public static final String ENC_BIT_JSON = "encBit";
 
   public static final int HTTP_TEMPORARY_REDIRECT = 307;
   private static final String HTTP_GET = "GET";
@@ -957,6 +960,21 @@ public class HttpFSFileSystem extends FileSystem
     return createAclStatus(json);
   }
 
+  /** Convert a string to a FsPermission object. */
+  static FsPermission toFsPermission(JSONObject json) {
+    final String s = (String) json.get(PERMISSION_JSON);
+    final Boolean aclBit = (Boolean) json.get(ACL_BIT_JSON);
+    final Boolean encBit = (Boolean) json.get(ENC_BIT_JSON);
+    FsPermission perm = new FsPermission(Short.parseShort(s, 8));
+    final boolean aBit = (aclBit != null) ? aclBit : false;
+    final boolean eBit = (encBit != null) ? encBit : false;
+    if (aBit || eBit) {
+      return new FsPermissionExtension(perm, aBit, eBit);
+    } else {
+      return perm;
+    }
+  }
+
   private FileStatus createFileStatus(Path parent, JSONObject json) {
     String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
     Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
@@ -964,8 +982,7 @@ public class HttpFSFileSystem extends FileSystem
     long len = (Long) json.get(LENGTH_JSON);
     String owner = (String) json.get(OWNER_JSON);
     String group = (String) json.get(GROUP_JSON);
-    FsPermission permission =
-        new FsPermission(Short.parseShort((String) json.get(PERMISSION_JSON), 8));
+    final FsPermission permission = toFsPermission(json);
     long aTime = (Long) json.get(ACCESS_TIME_JSON);
     long mTime = (Long) json.get(MODIFICATION_TIME_JSON);
     long blockSize = (Long) json.get(BLOCK_SIZE_JSON);
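
For callers, the practical effect of the client half above is that the propagated flags become visible through the ordinary FileStatus API once toFsPermission wraps them in an FsPermissionExtension. A small caller-side sketch, not part of the patch; fs and path stand for whatever HttpFS FileSystem instance and path the caller already has:

    import java.io.IOException;

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class PermissionFlagsProbe {
      /** Prints whether the path carries ACLs and whether it lies in an encryption zone. */
      public static void probe(FileSystem fs, Path path) throws IOException {
        FileStatus status = fs.getFileStatus(path);
        boolean hasAcl = status.getPermission().getAclBit();
        boolean encrypted = status.isEncrypted();  // in this Hadoop line, backed by the permission's encrypted bit
        System.out.println(path + ": aclBit=" + hasAcl + ", encrypted=" + encrypted);
      }
    }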

FSOperations.java

@@ -31,7 +31,6 @@ import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.protocol.AclException;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.lib.service.FileSystemAccess;
 import org.apache.hadoop.util.StringUtils;
@@ -54,148 +53,59 @@ import java.util.Map.Entry;
 public class FSOperations {
 
-  /**
-   * This class is used to group a FileStatus and an AclStatus together.
-   * It's needed for the GETFILESTATUS and LISTSTATUS calls, which take
-   * most info from the FileStatus and a wee bit from the AclStatus.
-   */
-  private static class StatusPair {
-    private FileStatus fileStatus;
-    private AclStatus aclStatus;
-
-    /**
-     * Simple constructor
-     * @param fileStatus Existing FileStatus object
-     * @param aclStatus Existing AclStatus object
-     */
-    public StatusPair(FileStatus fileStatus, AclStatus aclStatus) {
-      this.fileStatus = fileStatus;
-      this.aclStatus = aclStatus;
-    }
-
-    /**
-     * Create one StatusPair by performing the underlying calls to
-     * fs.getFileStatus and fs.getAclStatus
-     * @param fs The FileSystem where 'path' lives
-     * @param path The file/directory to query
-     * @throws IOException
-     */
-    public StatusPair(FileSystem fs, Path path) throws IOException {
-      fileStatus = fs.getFileStatus(path);
-      aclStatus = null;
-      try {
-        aclStatus = fs.getAclStatus(path);
-      } catch (AclException e) {
-        /*
-         * The cause is almost certainly an "ACLS aren't enabled"
-         * exception, so leave aclStatus at null and carry on.
-         */
-      } catch (UnsupportedOperationException e) {
-        /* Ditto above - this is the case for a local file system */
-      }
-    }
-
-    /**
-     * Return a Map suitable for conversion into JSON format
-     * @return The JSONish Map
-     */
-    public Map<String,Object> toJson() {
-      Map<String,Object> json = new LinkedHashMap<String,Object>();
-      json.put(HttpFSFileSystem.FILE_STATUS_JSON, toJsonInner(true));
-      return json;
-    }
-
-    /**
-     * Return in inner part of the JSON for the status - used by both the
-     * GETFILESTATUS and LISTSTATUS calls.
-     * @param emptyPathSuffix Whether or not to include PATH_SUFFIX_JSON
-     * @return The JSONish Map
-     */
-    public Map<String,Object> toJsonInner(boolean emptyPathSuffix) {
-      Map<String,Object> json = new LinkedHashMap<String,Object>();
-      json.put(HttpFSFileSystem.PATH_SUFFIX_JSON,
-          (emptyPathSuffix) ? "" : fileStatus.getPath().getName());
-      json.put(HttpFSFileSystem.TYPE_JSON,
-          HttpFSFileSystem.FILE_TYPE.getType(fileStatus).toString());
-      json.put(HttpFSFileSystem.LENGTH_JSON, fileStatus.getLen());
-      json.put(HttpFSFileSystem.OWNER_JSON, fileStatus.getOwner());
-      json.put(HttpFSFileSystem.GROUP_JSON, fileStatus.getGroup());
-      json.put(HttpFSFileSystem.PERMISSION_JSON,
-          HttpFSFileSystem.permissionToString(fileStatus.getPermission()));
-      json.put(HttpFSFileSystem.ACCESS_TIME_JSON, fileStatus.getAccessTime());
-      json.put(HttpFSFileSystem.MODIFICATION_TIME_JSON,
-          fileStatus.getModificationTime());
-      json.put(HttpFSFileSystem.BLOCK_SIZE_JSON, fileStatus.getBlockSize());
-      json.put(HttpFSFileSystem.REPLICATION_JSON, fileStatus.getReplication());
-      if ( (aclStatus != null) && !(aclStatus.getEntries().isEmpty()) ) {
-        json.put(HttpFSFileSystem.ACL_BIT_JSON,true);
-      }
-      return json;
-    }
-  }
-
-  /**
-   * Simple class used to contain and operate upon a list of StatusPair
-   * objects. Used by LISTSTATUS.
-   */
-  private static class StatusPairs {
-    private StatusPair[] statusPairs;
-
-    /**
-     * Construct a list of StatusPair objects
-     * @param fs The FileSystem where 'path' lives
-     * @param path The directory to query
-     * @param filter A possible filter for entries in the directory
-     * @throws IOException
-     */
-    public StatusPairs(FileSystem fs, Path path, PathFilter filter)
-        throws IOException {
-      /* Grab all the file statuses at once in an array */
-      FileStatus[] fileStatuses = fs.listStatus(path, filter);
-
-      /* We'll have an array of StatusPairs of the same length */
-      AclStatus aclStatus = null;
-      statusPairs = new StatusPair[fileStatuses.length];
-
-      /*
-       * For each FileStatus, attempt to acquire an AclStatus.  If the
-       * getAclStatus throws an exception, we assume that ACLs are turned
-       * off entirely and abandon the attempt.
-       */
-      boolean useAcls = true;  // Assume ACLs work until proven otherwise
-      for (int i = 0; i < fileStatuses.length; i++) {
-        if (useAcls) {
-          try {
-            aclStatus = fs.getAclStatus(fileStatuses[i].getPath());
-          } catch (AclException e) {
-            /* Almost certainly due to an "ACLs not enabled" exception */
-            aclStatus = null;
-            useAcls = false;
-          } catch (UnsupportedOperationException e) {
-            /* Ditto above - this is the case for a local file system */
-            aclStatus = null;
-            useAcls = false;
-          }
-        }
-        statusPairs[i] = new StatusPair(fileStatuses[i], aclStatus);
-      }
-    }
-
-    /**
-     * Return a Map suitable for conversion into JSON.
-     * @return A JSONish Map
-     */
-    @SuppressWarnings({"unchecked"})
-    public Map<String,Object> toJson() {
-      Map<String,Object> json = new LinkedHashMap<String,Object>();
-      Map<String,Object> inner = new LinkedHashMap<String,Object>();
-      JSONArray statuses = new JSONArray();
-      for (StatusPair s : statusPairs) {
-        statuses.add(s.toJsonInner(false));
-      }
-      inner.put(HttpFSFileSystem.FILE_STATUS_JSON, statuses);
-      json.put(HttpFSFileSystem.FILE_STATUSES_JSON, inner);
-      return json;
-    }
-  }
+  /**
+   * @param fileStatus a FileStatus object
+   * @return JSON map suitable for wire transport
+   */
+  private static Map<String, Object> toJson(FileStatus fileStatus) {
+    Map<String, Object> json = new LinkedHashMap<>();
+    json.put(HttpFSFileSystem.FILE_STATUS_JSON, toJsonInner(fileStatus, true));
+    return json;
+  }
+
+  /**
+   * @param fileStatuses list of FileStatus objects
+   * @return JSON map suitable for wire transport
+   */
+  @SuppressWarnings({"unchecked"})
+  private static Map<String, Object> toJson(FileStatus[] fileStatuses) {
+    Map<String, Object> json = new LinkedHashMap<>();
+    Map<String, Object> inner = new LinkedHashMap<>();
+    JSONArray statuses = new JSONArray();
+    for (FileStatus f : fileStatuses) {
+      statuses.add(toJsonInner(f, false));
+    }
+    inner.put(HttpFSFileSystem.FILE_STATUS_JSON, statuses);
+    json.put(HttpFSFileSystem.FILE_STATUSES_JSON, inner);
+    return json;
+  }
+
+  /**
+   * Not meant to be called directly except by the other toJson functions.
+   */
+  private static Map<String, Object> toJsonInner(FileStatus fileStatus,
+      boolean emptyPathSuffix) {
+    Map<String, Object> json = new LinkedHashMap<String, Object>();
+    json.put(HttpFSFileSystem.PATH_SUFFIX_JSON,
+        (emptyPathSuffix) ? "" : fileStatus.getPath().getName());
+    json.put(HttpFSFileSystem.TYPE_JSON,
+        HttpFSFileSystem.FILE_TYPE.getType(fileStatus).toString());
+    json.put(HttpFSFileSystem.LENGTH_JSON, fileStatus.getLen());
+    json.put(HttpFSFileSystem.OWNER_JSON, fileStatus.getOwner());
+    json.put(HttpFSFileSystem.GROUP_JSON, fileStatus.getGroup());
+    json.put(HttpFSFileSystem.PERMISSION_JSON,
+        HttpFSFileSystem.permissionToString(fileStatus.getPermission()));
+    json.put(HttpFSFileSystem.ACCESS_TIME_JSON, fileStatus.getAccessTime());
+    json.put(HttpFSFileSystem.MODIFICATION_TIME_JSON,
+        fileStatus.getModificationTime());
+    json.put(HttpFSFileSystem.BLOCK_SIZE_JSON, fileStatus.getBlockSize());
+    json.put(HttpFSFileSystem.REPLICATION_JSON, fileStatus.getReplication());
+    if (fileStatus.getPermission().getAclBit()) {
+      json.put(HttpFSFileSystem.ACL_BIT_JSON, true);
+    }
+    if (fileStatus.getPermission().getEncryptedBit()) {
+      json.put(HttpFSFileSystem.ENC_BIT_JSON, true);
+    }
+    return json;
+  }
 
   /** Converts an <code>AclStatus</code> object into a JSON object.
@@ -637,8 +547,8 @@
    */
   @Override
   public Map execute(FileSystem fs) throws IOException {
-    StatusPair sp = new StatusPair(fs, path);
-    return sp.toJson();
+    FileStatus status = fs.getFileStatus(path);
+    return toJson(status);
   }
 
 }
@@ -703,8 +613,8 @@
    */
   @Override
   public Map execute(FileSystem fs) throws IOException {
-    StatusPairs sp = new StatusPairs(fs, path, filter);
-    return sp.toJson();
+    FileStatus[] fileStatuses = fs.listStatus(path, filter);
+    return toJson(fileStatuses);
   }
 
   @Override
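
The server half is now a straight refactor plus two lines: instead of issuing a getAclStatus() call per path (and per listed child) just to decide whether to emit aclBit, FSOperations reads both flags off the FsPermission already carried by the FileStatus. A stripped-down sketch of that idea, assuming the aclBit/encBit key names added in HttpFSFileSystem and using a plain octal string where the patch calls HttpFSFileSystem.permissionToString():

    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.apache.hadoop.fs.FileStatus;

    public class PermissionJsonSketch {
      static Map<String, Object> permissionFlags(FileStatus fileStatus) {
        Map<String, Object> json = new LinkedHashMap<>();
        // The octal permission string is always present; the flags are emitted
        // only when set, mirroring the WebHDFS convention.
        json.put("permission",
            Integer.toOctalString(fileStatus.getPermission().toShort()));
        if (fileStatus.getPermission().getAclBit()) {
          json.put("aclBit", true);
        }
        if (fileStatus.getPermission().getEncryptedBit()) {
          json.put("encBit", true);
        }
        return json;
      }
    }

The old per-entry getAclStatus() probing was also the only user of AclException in this class, which is why that import disappears at the top of the file.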

BaseTestHttpFSWith.java

@@ -40,6 +40,7 @@ import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;
 import org.apache.hadoop.test.TestDirHelper;
 import org.apache.hadoop.test.TestHdfs;
+import org.apache.hadoop.test.TestHdfsHelper;
 import org.apache.hadoop.test.TestJetty;
 import org.apache.hadoop.test.TestJettyHelper;
 import org.junit.Assert;
@@ -66,6 +67,11 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 @RunWith(value = Parameterized.class)
 public abstract class BaseTestHttpFSWith extends HFSTestCase {
@@ -81,9 +87,9 @@
   private void createHttpFSServer() throws Exception {
     File homeDir = TestDirHelper.getTestDir();
-    Assert.assertTrue(new File(homeDir, "conf").mkdir());
-    Assert.assertTrue(new File(homeDir, "log").mkdir());
-    Assert.assertTrue(new File(homeDir, "temp").mkdir());
+    assertTrue(new File(homeDir, "conf").mkdir());
+    assertTrue(new File(homeDir, "log").mkdir());
+    assertTrue(new File(homeDir, "temp").mkdir());
     HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
     File secretFile = new File(new File(homeDir, "conf"), "secret");
@@ -143,7 +149,7 @@
     Assert.assertNotNull(fs);
     URI uri = new URI(getScheme() + "://" +
         TestJettyHelper.getJettyURL().toURI().getAuthority());
-    Assert.assertEquals(fs.getUri(), uri);
+    assertEquals(fs.getUri(), uri);
     fs.close();
   }
@@ -156,7 +162,7 @@
     fs.close();
     fs = getHttpFSFileSystem();
     InputStream is = fs.open(new Path(path.toUri().getPath()));
-    Assert.assertEquals(is.read(), 1);
+    assertEquals(is.read(), 1);
     is.close();
     fs.close();
   }
@@ -173,12 +179,12 @@
     fs = FileSystem.get(getProxiedFSConf());
     FileStatus status = fs.getFileStatus(path);
     if (!isLocalFS()) {
-      Assert.assertEquals(status.getReplication(), 2);
-      Assert.assertEquals(status.getBlockSize(), 100 * 1024 * 1024);
+      assertEquals(status.getReplication(), 2);
+      assertEquals(status.getBlockSize(), 100 * 1024 * 1024);
     }
-    Assert.assertEquals(status.getPermission(), permission);
+    assertEquals(status.getPermission(), permission);
     InputStream is = fs.open(path);
-    Assert.assertEquals(is.read(), 1);
+    assertEquals(is.read(), 1);
     is.close();
     fs.close();
   }
@@ -216,9 +222,9 @@
     fs.close();
     fs = FileSystem.get(getProxiedFSConf());
     InputStream is = fs.open(path);
-    Assert.assertEquals(is.read(), 1);
-    Assert.assertEquals(is.read(), 2);
-    Assert.assertEquals(is.read(), -1);
+    assertEquals(is.read(), 1);
+    assertEquals(is.read(), 2);
+    assertEquals(is.read(), -1);
     is.close();
     fs.close();
   }
@@ -239,10 +245,10 @@
     final int newLength = blockSize;
     boolean isReady = fs.truncate(file, newLength);
-    Assert.assertTrue("Recovery is not expected.", isReady);
+    assertTrue("Recovery is not expected.", isReady);
     FileStatus fileStatus = fs.getFileStatus(file);
-    Assert.assertEquals(fileStatus.getLen(), newLength);
+    assertEquals(fileStatus.getLen(), newLength);
     AppendTestUtil.checkFullFile(fs, file, newLength, data, file.toString());
     fs.close();
@@ -266,9 +272,9 @@
       fs.concat(path1, new Path[]{path2, path3});
       fs.close();
       fs = FileSystem.get(config);
-      Assert.assertTrue(fs.exists(path1));
-      Assert.assertFalse(fs.exists(path2));
-      Assert.assertFalse(fs.exists(path3));
+      assertTrue(fs.exists(path1));
+      assertFalse(fs.exists(path2));
+      assertFalse(fs.exists(path3));
       fs.close();
     }
   }
@@ -284,8 +290,8 @@
     fs.rename(oldPath, newPath);
     fs.close();
     fs = FileSystem.get(getProxiedFSConf());
-    Assert.assertFalse(fs.exists(oldPath));
-    Assert.assertTrue(fs.exists(newPath));
+    assertFalse(fs.exists(oldPath));
+    assertTrue(fs.exists(newPath));
     fs.close();
   }
@@ -299,8 +305,8 @@
     fs.mkdirs(foe);
     FileSystem hoopFs = getHttpFSFileSystem();
-    Assert.assertTrue(hoopFs.delete(new Path(foo.toUri().getPath()), false));
-    Assert.assertFalse(fs.exists(foo));
+    assertTrue(hoopFs.delete(new Path(foo.toUri().getPath()), false));
+    assertFalse(fs.exists(foo));
     try {
       hoopFs.delete(new Path(bar.toUri().getPath()), false);
       Assert.fail();
@@ -308,13 +314,13 @@
     } catch (Exception ex) {
      Assert.fail();
     }
-    Assert.assertTrue(fs.exists(bar));
-    Assert.assertTrue(hoopFs.delete(new Path(bar.toUri().getPath()), true));
-    Assert.assertFalse(fs.exists(bar));
-    Assert.assertTrue(fs.exists(foe));
-    Assert.assertTrue(hoopFs.delete(foe, true));
-    Assert.assertFalse(fs.exists(foe));
+    assertTrue(fs.exists(bar));
+    assertTrue(hoopFs.delete(new Path(bar.toUri().getPath()), true));
+    assertFalse(fs.exists(bar));
+    assertTrue(fs.exists(foe));
+    assertTrue(hoopFs.delete(foe, true));
+    assertFalse(fs.exists(foe));
     hoopFs.close();
     fs.close();
@@ -333,19 +339,20 @@
     FileStatus status2 = fs.getFileStatus(new Path(path.toUri().getPath()));
     fs.close();
-    Assert.assertEquals(status2.getPermission(), status1.getPermission());
-    Assert.assertEquals(status2.getPath().toUri().getPath(), status1.getPath().toUri().getPath());
-    Assert.assertEquals(status2.getReplication(), status1.getReplication());
-    Assert.assertEquals(status2.getBlockSize(), status1.getBlockSize());
-    Assert.assertEquals(status2.getAccessTime(), status1.getAccessTime());
-    Assert.assertEquals(status2.getModificationTime(), status1.getModificationTime());
-    Assert.assertEquals(status2.getOwner(), status1.getOwner());
-    Assert.assertEquals(status2.getGroup(), status1.getGroup());
-    Assert.assertEquals(status2.getLen(), status1.getLen());
+    assertEquals(status2.getPermission(), status1.getPermission());
+    assertEquals(status2.getPath().toUri().getPath(),
+        status1.getPath().toUri().getPath());
+    assertEquals(status2.getReplication(), status1.getReplication());
+    assertEquals(status2.getBlockSize(), status1.getBlockSize());
+    assertEquals(status2.getAccessTime(), status1.getAccessTime());
+    assertEquals(status2.getModificationTime(), status1.getModificationTime());
+    assertEquals(status2.getOwner(), status1.getOwner());
+    assertEquals(status2.getGroup(), status1.getGroup());
+    assertEquals(status2.getLen(), status1.getLen());
     FileStatus[] stati = fs.listStatus(path.getParent());
-    Assert.assertEquals(stati.length, 1);
-    Assert.assertEquals(stati[0].getPath().getName(), path.getName());
+    assertEquals(stati.length, 1);
+    assertEquals(stati[0].getPath().getName(), path.getName());
   }
 
   private void testWorkingdirectory() throws Exception {
@@ -359,14 +366,15 @@
     }
     Path httpFSWorkingDir = fs.getWorkingDirectory();
     fs.close();
-    Assert.assertEquals(httpFSWorkingDir.toUri().getPath(),
+    assertEquals(httpFSWorkingDir.toUri().getPath(),
         workingDir.toUri().getPath());
     fs = getHttpFSFileSystem();
     fs.setWorkingDirectory(new Path("/tmp"));
     workingDir = fs.getWorkingDirectory();
     fs.close();
-    Assert.assertEquals(workingDir.toUri().getPath(), new Path("/tmp").toUri().getPath());
+    assertEquals(workingDir.toUri().getPath(),
+        new Path("/tmp").toUri().getPath());
   }
 
   private void testMkdirs() throws Exception {
@@ -375,7 +383,7 @@
     fs.mkdirs(path);
     fs.close();
     fs = FileSystem.get(getProxiedFSConf());
-    Assert.assertTrue(fs.exists(path));
+    assertTrue(fs.exists(path));
     fs.close();
   }
@@ -400,8 +408,8 @@
       fs.close();
       long atNew = status1.getAccessTime();
       long mtNew = status1.getModificationTime();
-      Assert.assertEquals(mtNew, mt - 10);
-      Assert.assertEquals(atNew, at - 20);
+      assertEquals(mtNew, mt - 10);
+      assertEquals(atNew, at - 20);
     }
   }
@@ -419,7 +427,7 @@
     FileStatus status1 = fs.getFileStatus(path);
     fs.close();
     FsPermission permission2 = status1.getPermission();
-    Assert.assertEquals(permission2, permission1);
+    assertEquals(permission2, permission1);
     //sticky bit
     fs = getHttpFSFileSystem();
@@ -431,8 +439,8 @@
     status1 = fs.getFileStatus(path);
     fs.close();
     permission2 = status1.getPermission();
-    Assert.assertTrue(permission2.getStickyBit());
-    Assert.assertEquals(permission2, permission1);
+    assertTrue(permission2.getStickyBit());
+    assertEquals(permission2, permission1);
   }
 
   private void testSetOwner() throws Exception {
@@ -454,8 +462,8 @@
       fs = FileSystem.get(getProxiedFSConf());
       FileStatus status1 = fs.getFileStatus(path);
       fs.close();
-      Assert.assertEquals(status1.getOwner(), user);
-      Assert.assertEquals(status1.getGroup(), group);
+      assertEquals(status1.getOwner(), user);
+      assertEquals(status1.getGroup(), group);
     }
   }
@@ -475,7 +483,7 @@
     fs = FileSystem.get(getProxiedFSConf());
     FileStatus status1 = fs.getFileStatus(path);
     fs.close();
-    Assert.assertEquals(status1.getReplication(), (short) 1);
+    assertEquals(status1.getReplication(), (short) 1);
   }
 
   private void testChecksum() throws Exception {
@@ -491,9 +499,10 @@
       fs = getHttpFSFileSystem();
       FileChecksum httpChecksum = fs.getFileChecksum(path);
       fs.close();
-      Assert.assertEquals(httpChecksum.getAlgorithmName(), hdfsChecksum.getAlgorithmName());
-      Assert.assertEquals(httpChecksum.getLength(), hdfsChecksum.getLength());
-      Assert.assertArrayEquals(httpChecksum.getBytes(), hdfsChecksum.getBytes());
+      assertEquals(httpChecksum.getAlgorithmName(),
+          hdfsChecksum.getAlgorithmName());
+      assertEquals(httpChecksum.getLength(), hdfsChecksum.getLength());
+      assertArrayEquals(httpChecksum.getBytes(), hdfsChecksum.getBytes());
     }
   }
@@ -508,12 +517,17 @@
     fs = getHttpFSFileSystem();
     ContentSummary httpContentSummary = fs.getContentSummary(path);
     fs.close();
-    Assert.assertEquals(httpContentSummary.getDirectoryCount(), hdfsContentSummary.getDirectoryCount());
-    Assert.assertEquals(httpContentSummary.getFileCount(), hdfsContentSummary.getFileCount());
-    Assert.assertEquals(httpContentSummary.getLength(), hdfsContentSummary.getLength());
-    Assert.assertEquals(httpContentSummary.getQuota(), hdfsContentSummary.getQuota());
-    Assert.assertEquals(httpContentSummary.getSpaceConsumed(), hdfsContentSummary.getSpaceConsumed());
-    Assert.assertEquals(httpContentSummary.getSpaceQuota(), hdfsContentSummary.getSpaceQuota());
+    assertEquals(httpContentSummary.getDirectoryCount(),
+        hdfsContentSummary.getDirectoryCount());
+    assertEquals(httpContentSummary.getFileCount(),
+        hdfsContentSummary.getFileCount());
+    assertEquals(httpContentSummary.getLength(),
+        hdfsContentSummary.getLength());
+    assertEquals(httpContentSummary.getQuota(), hdfsContentSummary.getQuota());
+    assertEquals(httpContentSummary.getSpaceConsumed(),
+        hdfsContentSummary.getSpaceConsumed());
+    assertEquals(httpContentSummary.getSpaceQuota(),
+        hdfsContentSummary.getSpaceQuota());
   }
 
   /** Set xattr */
@@ -552,11 +566,11 @@
       fs = FileSystem.get(getProxiedFSConf());
       Map<String, byte[]> xAttrs = fs.getXAttrs(path);
       fs.close();
-      Assert.assertEquals(4, xAttrs.size());
-      Assert.assertArrayEquals(value1, xAttrs.get(name1));
-      Assert.assertArrayEquals(value2, xAttrs.get(name2));
-      Assert.assertArrayEquals(new byte[0], xAttrs.get(name3));
-      Assert.assertArrayEquals(value4, xAttrs.get(name4));
+      assertEquals(4, xAttrs.size());
+      assertArrayEquals(value1, xAttrs.get(name1));
+      assertArrayEquals(value2, xAttrs.get(name2));
+      assertArrayEquals(new byte[0], xAttrs.get(name3));
+      assertArrayEquals(value4, xAttrs.get(name4));
     }
   }
@@ -595,16 +609,16 @@
       names.add(name4);
       Map<String, byte[]> xAttrs = fs.getXAttrs(path, names);
       fs.close();
-      Assert.assertEquals(4, xAttrs.size());
-      Assert.assertArrayEquals(value1, xAttrs.get(name1));
-      Assert.assertArrayEquals(value2, xAttrs.get(name2));
-      Assert.assertArrayEquals(new byte[0], xAttrs.get(name3));
-      Assert.assertArrayEquals(value4, xAttrs.get(name4));
+      assertEquals(4, xAttrs.size());
+      assertArrayEquals(value1, xAttrs.get(name1));
+      assertArrayEquals(value2, xAttrs.get(name2));
+      assertArrayEquals(new byte[0], xAttrs.get(name3));
+      assertArrayEquals(value4, xAttrs.get(name4));
       // Get specific xattr
       fs = getHttpFSFileSystem();
       byte[] value = fs.getXAttr(path, name1);
-      Assert.assertArrayEquals(value1, value);
+      assertArrayEquals(value1, value);
       final String name5 = "a1";
       try {
         value = fs.getXAttr(path, name5);
@@ -618,11 +632,11 @@
       fs = getHttpFSFileSystem();
       xAttrs = fs.getXAttrs(path);
       fs.close();
-      Assert.assertEquals(4, xAttrs.size());
-      Assert.assertArrayEquals(value1, xAttrs.get(name1));
-      Assert.assertArrayEquals(value2, xAttrs.get(name2));
-      Assert.assertArrayEquals(new byte[0], xAttrs.get(name3));
-      Assert.assertArrayEquals(value4, xAttrs.get(name4));
+      assertEquals(4, xAttrs.size());
+      assertArrayEquals(value1, xAttrs.get(name1));
+      assertArrayEquals(value2, xAttrs.get(name2));
+      assertArrayEquals(new byte[0], xAttrs.get(name3));
+      assertArrayEquals(value4, xAttrs.get(name4));
     }
   }
@@ -667,8 +681,8 @@
      fs = FileSystem.get(getProxiedFSConf());
      Map<String, byte[]> xAttrs = fs.getXAttrs(path);
      fs.close();
-      Assert.assertEquals(1, xAttrs.size());
-      Assert.assertArrayEquals(value2, xAttrs.get(name2));
+      assertEquals(1, xAttrs.size());
+      assertArrayEquals(value2, xAttrs.get(name2));
     }
   }
@@ -700,11 +714,11 @@
      fs = getHttpFSFileSystem();
      List<String> names = fs.listXAttrs(path);
-      Assert.assertEquals(4, names.size());
-      Assert.assertTrue(names.contains(name1));
-      Assert.assertTrue(names.contains(name2));
-      Assert.assertTrue(names.contains(name3));
-      Assert.assertTrue(names.contains(name4));
+      assertEquals(4, names.size());
+      assertTrue(names.contains(name1));
+      assertTrue(names.contains(name2));
+      assertTrue(names.contains(name3));
+      assertTrue(names.contains(name4));
     }
   }
@@ -715,18 +729,26 @@
    * @throws Exception
    */
   private void assertSameAcls(AclStatus a, AclStatus b) throws Exception {
-    Assert.assertTrue(a.getOwner().equals(b.getOwner()));
-    Assert.assertTrue(a.getGroup().equals(b.getGroup()));
-    Assert.assertTrue(a.isStickyBit() == b.isStickyBit());
-    Assert.assertTrue(a.getEntries().size() == b.getEntries().size());
+    assertTrue(a.getOwner().equals(b.getOwner()));
+    assertTrue(a.getGroup().equals(b.getGroup()));
+    assertTrue(a.isStickyBit() == b.isStickyBit());
+    assertTrue(a.getEntries().size() == b.getEntries().size());
     for (AclEntry e : a.getEntries()) {
-      Assert.assertTrue(b.getEntries().contains(e));
+      assertTrue(b.getEntries().contains(e));
     }
     for (AclEntry e : b.getEntries()) {
-      Assert.assertTrue(a.getEntries().contains(e));
+      assertTrue(a.getEntries().contains(e));
     }
   }
 
+  private static void assertSameAclBit(FileSystem expected, FileSystem actual,
+      Path path) throws IOException {
+    FileStatus expectedFileStatus = expected.getFileStatus(path);
+    FileStatus actualFileStatus = actual.getFileStatus(path);
+    assertEquals(actualFileStatus.getPermission().getAclBit(),
+        expectedFileStatus.getPermission().getAclBit());
+  }
+
   /**
    * Simple ACL tests on a file: Set an acl, add an acl, remove one acl,
    * and remove all acls.
@@ -755,26 +777,31 @@
     AclStatus proxyAclStat = proxyFs.getAclStatus(path);
     AclStatus httpfsAclStat = httpfs.getAclStatus(path);
     assertSameAcls(httpfsAclStat, proxyAclStat);
+    assertSameAclBit(httpfs, proxyFs, path);
 
     httpfs.setAcl(path, AclEntry.parseAclSpec(aclSet,true));
     proxyAclStat = proxyFs.getAclStatus(path);
     httpfsAclStat = httpfs.getAclStatus(path);
     assertSameAcls(httpfsAclStat, proxyAclStat);
+    assertSameAclBit(httpfs, proxyFs, path);
 
     httpfs.modifyAclEntries(path, AclEntry.parseAclSpec(aclUser2, true));
     proxyAclStat = proxyFs.getAclStatus(path);
     httpfsAclStat = httpfs.getAclStatus(path);
     assertSameAcls(httpfsAclStat, proxyAclStat);
+    assertSameAclBit(httpfs, proxyFs, path);
 
     httpfs.removeAclEntries(path, AclEntry.parseAclSpec(rmAclUser1, false));
     proxyAclStat = proxyFs.getAclStatus(path);
     httpfsAclStat = httpfs.getAclStatus(path);
     assertSameAcls(httpfsAclStat, proxyAclStat);
+    assertSameAclBit(httpfs, proxyFs, path);
 
     httpfs.removeAcl(path);
     proxyAclStat = proxyFs.getAclStatus(path);
     httpfsAclStat = httpfs.getAclStatus(path);
     assertSameAcls(httpfsAclStat, proxyAclStat);
+    assertSameAclBit(httpfs, proxyFs, path);
   }
 
   /**
@@ -797,25 +824,46 @@
     AclStatus proxyAclStat = proxyFs.getAclStatus(dir);
     AclStatus httpfsAclStat = httpfs.getAclStatus(dir);
     assertSameAcls(httpfsAclStat, proxyAclStat);
+    assertSameAclBit(httpfs, proxyFs, dir);
 
     /* Set a default ACL on the directory */
     httpfs.setAcl(dir, (AclEntry.parseAclSpec(defUser1,true)));
     proxyAclStat = proxyFs.getAclStatus(dir);
     httpfsAclStat = httpfs.getAclStatus(dir);
     assertSameAcls(httpfsAclStat, proxyAclStat);
+    assertSameAclBit(httpfs, proxyFs, dir);
 
     /* Remove the default ACL */
     httpfs.removeDefaultAcl(dir);
     proxyAclStat = proxyFs.getAclStatus(dir);
     httpfsAclStat = httpfs.getAclStatus(dir);
     assertSameAcls(httpfsAclStat, proxyAclStat);
+    assertSameAclBit(httpfs, proxyFs, dir);
+  }
+
+  private void testEncryption() throws Exception {
+    if (isLocalFS()) {
+      return;
+    }
+    FileSystem proxyFs = FileSystem.get(getProxiedFSConf());
+    FileSystem httpFs = getHttpFSFileSystem();
+    FileStatus proxyStatus = proxyFs.getFileStatus(TestHdfsHelper
+        .ENCRYPTED_FILE);
+    assertTrue(proxyStatus.isEncrypted());
+    FileStatus httpStatus = httpFs.getFileStatus(TestHdfsHelper
+        .ENCRYPTED_FILE);
+    assertTrue(httpStatus.isEncrypted());
+    proxyStatus = proxyFs.getFileStatus(new Path("/"));
+    httpStatus = httpFs.getFileStatus(new Path("/"));
+    assertFalse(proxyStatus.isEncrypted());
+    assertFalse(httpStatus.isEncrypted());
   }
 
   protected enum Operation {
     GET, OPEN, CREATE, APPEND, TRUNCATE, CONCAT, RENAME, DELETE, LIST_STATUS,
     WORKING_DIRECTORY, MKDIRS, SET_TIMES, SET_PERMISSION, SET_OWNER,
     SET_REPLICATION, CHECKSUM, CONTENT_SUMMARY, FILEACLS, DIRACLS, SET_XATTR,
-    GET_XATTRS, REMOVE_XATTR, LIST_XATTRS
+    GET_XATTRS, REMOVE_XATTR, LIST_XATTRS, ENCRYPTION
   }
 
   private void operation(Operation op) throws Exception {
@@ -889,6 +937,9 @@
       case LIST_XATTRS:
         testListXAttrs();
         break;
+      case ENCRYPTION:
+        testEncryption();
+        break;
     }
   }
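
The test wiring stays deliberately light: ENCRYPTION becomes one more value in the parameterized Operation enum, and the switch above dispatches it to testEncryption(). A hedged sketch of the JUnit Parameterized pattern this relies on; the real parameter factory lives elsewhere in BaseTestHttpFSWith and may differ in detail, but the point is that every enum value, including the new one, turns into its own test instance for both the HttpFS and WebHDFS flavours of the suite:

    import java.util.ArrayList;
    import java.util.Collection;

    import org.junit.runners.Parameterized;

    public class OperationParamsSketch {
      // Trimmed copy of the enum; the real one lists every operation.
      enum Operation { FILEACLS, DIRACLS, ENCRYPTION }

      @Parameterized.Parameters
      public static Collection<Object[]> operations() {
        Collection<Object[]> params = new ArrayList<Object[]>();
        for (Operation op : Operation.values()) {
          params.add(new Object[]{op});  // one test instance per operation value
        }
        return params;
      }
    }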

TestHdfsHelper.java

@@ -21,10 +21,14 @@ import java.io.File;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.Test;
 import org.junit.runners.model.FrameworkMethod;
@@ -129,6 +133,9 @@ public class TestHdfsHelper extends TestDirHelper {
     return new Configuration(conf);
   }
 
+  public static final Path ENCRYPTION_ZONE = new Path("/ez");
+  public static final Path ENCRYPTED_FILE = new Path("/ez/encfile");
+
   private static MiniDFSCluster MINI_DFS = null;
 
   private static synchronized MiniDFSCluster startMiniHdfs(Configuration conf) throws Exception {
@@ -148,14 +155,28 @@
       conf.set("hadoop.security.authentication", "simple");
       conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
       conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, true);
+      FileSystemTestHelper helper = new FileSystemTestHelper();
+      final String jceksPath = JavaKeyStoreProvider.SCHEME_NAME + "://file" +
+          new Path(helper.getTestRootDir(), "test.jks").toUri();
+      conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, jceksPath);
       MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
       builder.numDataNodes(2);
       MiniDFSCluster miniHdfs = builder.build();
-      FileSystem fileSystem = miniHdfs.getFileSystem();
+      final String testkey = "testkey";
+      DFSTestUtil.createKey(testkey, miniHdfs, conf);
+      DistributedFileSystem fileSystem = miniHdfs.getFileSystem();
+      fileSystem.getClient().setKeyProvider(miniHdfs.getNameNode()
+          .getNamesystem().getProvider());
       fileSystem.mkdirs(new Path("/tmp"));
       fileSystem.mkdirs(new Path("/user"));
       fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
       fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
+      fileSystem.mkdirs(ENCRYPTION_ZONE);
+      fileSystem.createEncryptionZone(ENCRYPTION_ZONE, testkey);
+      fileSystem.create(ENCRYPTED_FILE).close();
       MINI_DFS = miniHdfs;
     }
     return MINI_DFS;
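
One fiddly detail in the mini-cluster setup above is the key-provider URI: it must name a JavaKeyStoreProvider-backed keystore (scheme jceks://file plus an absolute local path) and be set on the Configuration before the cluster is built, otherwise createEncryptionZone has no KeyProvider to resolve the test key against. A minimal sketch of just that step, with a placeholder directory in place of the FileSystemTestHelper-derived root the test uses:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    public class KeyProviderUriSketch {
      public static Configuration withKeyProvider(Configuration conf, String testRootDir) {
        // e.g. jceks://file/tmp/test-root/test.jks
        String jceksPath = JavaKeyStoreProvider.SCHEME_NAME + "://file" +
            new Path(testRootDir, "test.jks").toUri();
        conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, jceksPath);
        return conf;
      }
    }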