HDFS-12139. HTTPFS liststatus returns incorrect pathSuffix for path of file. Contributed by Yongjun Zhang.

This commit is contained in:
Yongjun Zhang 2017-07-19 10:54:13 -07:00
parent 413b23eb04
commit 3556e36be3
2 changed files with 34 additions and 7 deletions

View File

@@ -75,15 +75,17 @@ public class FSOperations {
   /**
    * @param fileStatuses list of FileStatus objects
+   * @param isFile is the fileStatuses from a file path
    * @return JSON map suitable for wire transport
    */
   @SuppressWarnings({"unchecked"})
-  private static Map<String, Object> toJson(FileStatus[] fileStatuses) {
+  private static Map<String, Object> toJson(FileStatus[] fileStatuses,
+      boolean isFile) {
     Map<String, Object> json = new LinkedHashMap<>();
     Map<String, Object> inner = new LinkedHashMap<>();
     JSONArray statuses = new JSONArray();
     for (FileStatus f : fileStatuses) {
-      statuses.add(toJsonInner(f, false));
+      statuses.add(toJsonInner(f, isFile));
     }
     inner.put(HttpFSFileSystem.FILE_STATUS_JSON, statuses);
     json.put(HttpFSFileSystem.FILE_STATUSES_JSON, inner);
@@ -129,13 +131,14 @@ public class FSOperations {
    * These two classes are slightly different, due to the impedance
    * mismatches between the WebHDFS and FileSystem APIs.
    * @param entries
+   * @param isFile is the entries from a file path
    * @return json
    */
   private static Map<String, Object> toJson(FileSystem.DirectoryEntries
-      entries) {
+      entries, boolean isFile) {
     Map<String, Object> json = new LinkedHashMap<>();
     Map<String, Object> inner = new LinkedHashMap<>();
-    Map<String, Object> fileStatuses = toJson(entries.getEntries());
+    Map<String, Object> fileStatuses = toJson(entries.getEntries(), isFile);
     inner.put(HttpFSFileSystem.PARTIAL_LISTING_JSON, fileStatuses);
     inner.put(HttpFSFileSystem.REMAINING_ENTRIES_JSON, entries.hasMore() ? 1
         : 0);
@@ -690,7 +693,7 @@ public class FSOperations {
     @Override
     public Map execute(FileSystem fs) throws IOException {
       FileStatus[] fileStatuses = fs.listStatus(path, filter);
-      return toJson(fileStatuses);
+      return toJson(fileStatuses, fs.getFileStatus(path).isFile());
     }

     @Override
@@ -735,7 +738,7 @@ public class FSOperations {
       WrappedFileSystem wrappedFS = new WrappedFileSystem(fs);
       FileSystem.DirectoryEntries entries =
           wrappedFS.listStatusBatch(path, token);
-      return toJson(entries);
+      return toJson(entries, wrappedFS.getFileStatus(path).isFile());
     }
   }

View File

@@ -364,8 +364,15 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
       assertEquals(status2.getLen(), status1.getLen());

       FileStatus[] stati = fs.listStatus(path.getParent());
-      assertEquals(stati.length, 1);
+      assertEquals(1, stati.length);
       assertEquals(stati[0].getPath().getName(), path.getName());
+
+      // The full path should be the path to the file. See HDFS-12139
+      FileStatus[] statl = fs.listStatus(path);
+      Assert.assertEquals(1, statl.length);
+      Assert.assertEquals(status2.getPath(), statl[0].getPath());
+      Assert.assertEquals(statl[0].getPath().getName(), path.getName());
+      Assert.assertEquals(stati[0].getPath(), statl[0].getPath());
     }

     private static void assertSameListing(FileSystem expected, FileSystem
@@ -411,6 +418,23 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
       proxyFs.create(new Path(dir, "file" + i)).close();
       assertSameListing(proxyFs, httpFs, dir);
     }
+
+    // Test for HDFS-12139
+    Path dir1 = new Path(getProxiedFSTestDir(), "dir1");
+    proxyFs.mkdirs(dir1);
+    Path file1 = new Path(dir1, "file1");
+    proxyFs.create(file1).close();
+
+    RemoteIterator<FileStatus> si = proxyFs.listStatusIterator(dir1);
+    FileStatus statusl = si.next();
+    FileStatus status = proxyFs.getFileStatus(file1);
+    Assert.assertEquals(file1.getName(), statusl.getPath().getName());
+    Assert.assertEquals(status.getPath(), statusl.getPath());
+
+    si = proxyFs.listStatusIterator(file1);
+    statusl = si.next();
+    Assert.assertEquals(file1.getName(), statusl.getPath().getName());
+    Assert.assertEquals(status.getPath(), statusl.getPath());
   }

   private void testWorkingdirectory() throws Exception {