svn merge -c 1180757 from trunk for HDFS-2404.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1189480 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Tsz-wo Sze 2011-10-26 21:31:04 +00:00
parent d28641b0b4
commit 05cbd76c44
5 changed files with 37 additions and 33 deletions

View File

@@ -1095,6 +1095,8 @@ Release 0.23.0 - Unreleased
HDFS-2409. _HOST in dfs.web.authentication.kerberos.principal. (jitendra) HDFS-2409. _HOST in dfs.web.authentication.kerberos.principal. (jitendra)
HDFS-2404. webhdfs liststatus json response is not correct. (suresh)
BREAKDOWN OF HDFS-1073 SUBTASKS BREAKDOWN OF HDFS-1073 SUBTASKS
HDFS-1521. Persist transaction ID on disk between NN restarts. HDFS-1521. Persist transaction ID on disk between NN restarts.

View File

@@ -424,7 +424,7 @@ public class NamenodeWebHdfsMethods {
case GETFILESTATUS: case GETFILESTATUS:
{ {
final HdfsFileStatus status = np.getFileInfo(fullpath); final HdfsFileStatus status = np.getFileInfo(fullpath);
final String js = JsonUtil.toJsonString(status); final String js = JsonUtil.toJsonString(status, true);
return Response.ok(js).type(MediaType.APPLICATION_JSON).build(); return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
} }
case LISTSTATUS: case LISTSTATUS:
@@ -480,22 +480,22 @@ public class NamenodeWebHdfsMethods {
@Override @Override
public void write(final OutputStream outstream) throws IOException { public void write(final OutputStream outstream) throws IOException {
final PrintStream out = new PrintStream(outstream); final PrintStream out = new PrintStream(outstream);
out.println("{\"" + HdfsFileStatus[].class.getSimpleName() + "\":["); out.println("{\"" + HdfsFileStatus.class.getSimpleName() + "\":[");
final HdfsFileStatus[] partial = first.getPartialListing(); final HdfsFileStatus[] partial = first.getPartialListing();
if (partial.length > 0) { if (partial.length > 0) {
out.print(JsonUtil.toJsonString(partial[0])); out.print(JsonUtil.toJsonString(partial[0], false));
} }
for(int i = 1; i < partial.length; i++) { for(int i = 1; i < partial.length; i++) {
out.println(','); out.println(',');
out.print(JsonUtil.toJsonString(partial[i])); out.print(JsonUtil.toJsonString(partial[i], false));
} }
for(DirectoryListing curr = first; curr.hasMore(); ) { for(DirectoryListing curr = first; curr.hasMore(); ) {
curr = getDirectoryListing(np, p, curr.getLastName()); curr = getDirectoryListing(np, p, curr.getLastName());
for(HdfsFileStatus s : curr.getPartialListing()) { for(HdfsFileStatus s : curr.getPartialListing()) {
out.println(','); out.println(',');
out.print(JsonUtil.toJsonString(s)); out.print(JsonUtil.toJsonString(s, false));
} }
} }

View File

@@ -133,37 +133,39 @@ public class JsonUtil {
} }
/** Convert a HdfsFileStatus object to a Json string. */ /** Convert a HdfsFileStatus object to a Json string. */
public static String toJsonString(final HdfsFileStatus status) { public static String toJsonString(final HdfsFileStatus status,
boolean includeType) {
if (status == null) { if (status == null) {
return null; return null;
} else {
final Map<String, Object> m = new TreeMap<String, Object>();
m.put("localName", status.getLocalName());
m.put("isDir", status.isDir());
m.put("isSymlink", status.isSymlink());
if (status.isSymlink()) {
m.put("symlink", status.getSymlink());
}
m.put("len", status.getLen());
m.put("owner", status.getOwner());
m.put("group", status.getGroup());
m.put("permission", toString(status.getPermission()));
m.put("accessTime", status.getAccessTime());
m.put("modificationTime", status.getModificationTime());
m.put("blockSize", status.getBlockSize());
m.put("replication", status.getReplication());
return toJsonString(HdfsFileStatus.class, m);
} }
final Map<String, Object> m = new TreeMap<String, Object>();
m.put("localName", status.getLocalName());
m.put("isDir", status.isDir());
m.put("isSymlink", status.isSymlink());
if (status.isSymlink()) {
m.put("symlink", status.getSymlink());
}
m.put("len", status.getLen());
m.put("owner", status.getOwner());
m.put("group", status.getGroup());
m.put("permission", toString(status.getPermission()));
m.put("accessTime", status.getAccessTime());
m.put("modificationTime", status.getModificationTime());
m.put("blockSize", status.getBlockSize());
m.put("replication", status.getReplication());
return includeType ? toJsonString(HdfsFileStatus.class, m) :
JSON.toString(m);
} }
/** Convert a Json map to a HdfsFileStatus object. */ /** Convert a Json map to a HdfsFileStatus object. */
public static HdfsFileStatus toFileStatus(final Map<?, ?> json) { public static HdfsFileStatus toFileStatus(final Map<?, ?> json, boolean includesType) {
if (json == null) { if (json == null) {
return null; return null;
} }
final Map<?, ?> m = (Map<?, ?>)json.get(HdfsFileStatus.class.getSimpleName()); final Map<?, ?> m = includesType ?
(Map<?, ?>)json.get(HdfsFileStatus.class.getSimpleName()) : json;
final String localName = (String) m.get("localName"); final String localName = (String) m.get("localName");
final boolean isDir = (Boolean) m.get("isDir"); final boolean isDir = (Boolean) m.get("isDir");
final boolean isSymlink = (Boolean) m.get("isSymlink"); final boolean isSymlink = (Boolean) m.get("isSymlink");
@@ -287,7 +289,7 @@ public class JsonUtil {
return array; return array;
} }
} }
/** Convert a LocatedBlock to a Json map. */ /** Convert a LocatedBlock to a Json map. */
private static Map<String, Object> toJsonMap(final LocatedBlock locatedblock private static Map<String, Object> toJsonMap(final LocatedBlock locatedblock
) throws IOException { ) throws IOException {

View File

@@ -253,7 +253,7 @@ public class WebHdfsFileSystem extends HftpFileSystem {
private HdfsFileStatus getHdfsFileStatus(Path f) throws IOException { private HdfsFileStatus getHdfsFileStatus(Path f) throws IOException {
final HttpOpParam.Op op = GetOpParam.Op.GETFILESTATUS; final HttpOpParam.Op op = GetOpParam.Op.GETFILESTATUS;
final Map<String, Object> json = run(op, f); final Map<String, Object> json = run(op, f);
final HdfsFileStatus status = JsonUtil.toFileStatus(json); final HdfsFileStatus status = JsonUtil.toFileStatus(json, true);
if (status == null) { if (status == null) {
throw new FileNotFoundException("File does not exist: " + f); throw new FileNotFoundException("File does not exist: " + f);
} }
@@ -405,14 +405,14 @@ public class WebHdfsFileSystem extends HftpFileSystem {
final HttpOpParam.Op op = GetOpParam.Op.LISTSTATUS; final HttpOpParam.Op op = GetOpParam.Op.LISTSTATUS;
final Map<?, ?> json = run(op, f); final Map<?, ?> json = run(op, f);
final Object[] array = (Object[])json.get( final Object[] array = (Object[])json.get(
HdfsFileStatus[].class.getSimpleName()); HdfsFileStatus.class.getSimpleName());
//convert FileStatus //convert FileStatus
final FileStatus[] statuses = new FileStatus[array.length]; final FileStatus[] statuses = new FileStatus[array.length];
for(int i = 0; i < array.length; i++) { for(int i = 0; i < array.length; i++) {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
final Map<String, Object> m = (Map<String, Object>)array[i]; final Map<String, Object> m = (Map<String, Object>)array[i];
statuses[i] = makeQualified(JsonUtil.toFileStatus(m), f); statuses[i] = makeQualified(JsonUtil.toFileStatus(m, false), f);
} }
return statuses; return statuses;
} }
@@ -472,4 +472,4 @@ public class WebHdfsFileSystem extends HftpFileSystem {
final Map<String, Object> m = run(op, p); final Map<String, Object> m = run(op, p);
return JsonUtil.toMD5MD5CRC32FileChecksum(m); return JsonUtil.toMD5MD5CRC32FileChecksum(m);
} }
} }

View File

@@ -46,9 +46,9 @@ public class TestJsonUtil {
final FileStatus fstatus = toFileStatus(status, parent); final FileStatus fstatus = toFileStatus(status, parent);
System.out.println("status = " + status); System.out.println("status = " + status);
System.out.println("fstatus = " + fstatus); System.out.println("fstatus = " + fstatus);
final String json = JsonUtil.toJsonString(status); final String json = JsonUtil.toJsonString(status, true);
System.out.println("json = " + json.replace(",", ",\n ")); System.out.println("json = " + json.replace(",", ",\n "));
final HdfsFileStatus s2 = JsonUtil.toFileStatus((Map<?, ?>)JSON.parse(json)); final HdfsFileStatus s2 = JsonUtil.toFileStatus((Map<?, ?>)JSON.parse(json), true);
final FileStatus fs2 = toFileStatus(s2, parent); final FileStatus fs2 = toFileStatus(s2, parent);
System.out.println("s2 = " + s2); System.out.println("s2 = " + s2);
System.out.println("fs2 = " + fs2); System.out.println("fs2 = " + fs2);