From b4d6c5823b04b2a8834e06e78cd109a359496eed Mon Sep 17 00:00:00 2001
From: Kihwal Lee
Date: Thu, 9 Jan 2014 19:24:07 +0000
Subject: [PATCH] HDFS-5449. WebHdfs compatibility broken between 2.2 and 1.x / 23.x. Contributed by Kihwal Lee.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1556927 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  2 +
 .../org/apache/hadoop/hdfs/web/JsonUtil.java  | 46 ++++++++++++--
 .../apache/hadoop/hdfs/web/TestJsonUtil.java  | 62 ++++++++++++++++++-
 3 files changed, 104 insertions(+), 6 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index fbd1d596c03..0440ff73b2c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -739,6 +739,8 @@ Release 2.4.0 - UNRELEASED
     HDFS-5690. DataNode fails to start in secure mode when dfs.http.policy
     equals to HTTP_ONLY. (Haohui Mai via jing9)
 
+    HDFS-5449. WebHdfs compatibility broken between 2.2 and 1.x / 23.x (kihwal)
+
   BREAKDOWN OF HDFS-2832 SUBTASKS AND RELATED JIRAS
 
     HDFS-4985. Add storage type to the protocol and expose it in block report
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
index 63fbda61d6b..a96f61826e4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
@@ -271,7 +271,7 @@ public class JsonUtil {
   }
 
   /** Convert a DatanodeInfo to a Json map. */
-  private static Map<String, Object> toJsonMap(final DatanodeInfo datanodeinfo) {
+  static Map<String, Object> toJsonMap(final DatanodeInfo datanodeinfo) {
     if (datanodeinfo == null) {
       return null;
     }
@@ -279,6 +279,9 @@
     // TODO: Fix storageID
     final Map<String, Object> m = new TreeMap<String, Object>();
     m.put("ipAddr", datanodeinfo.getIpAddr());
+    // 'name' is equivalent to ipAddr:xferPort. Older clients (1.x, 0.23.x)
+    // expect this instead of the two fields.
+    m.put("name", datanodeinfo.getXferAddr());
     m.put("hostName", datanodeinfo.getHostName());
     m.put("storageID", datanodeinfo.getDatanodeUuid());
     m.put("xferPort", datanodeinfo.getXferPort());
@@ -325,17 +328,49 @@
   }
 
   /** Convert a Json map to an DatanodeInfo object. */
-  static DatanodeInfo toDatanodeInfo(final Map<?, ?> m) {
+  static DatanodeInfo toDatanodeInfo(final Map<?, ?> m)
+      throws IOException {
     if (m == null) {
       return null;
     }
 
+    // ipAddr and xferPort are the critical fields for accessing data.
+    // If any one of the two is missing, an exception needs to be thrown.
+
+    // Handle the case of old servers (1.x, 0.23.x) sending 'name' instead
+    // of ipAddr and xferPort.
+    String ipAddr = getString(m, "ipAddr", null);
+    int xferPort = getInt(m, "xferPort", -1);
+    if (ipAddr == null) {
+      String name = getString(m, "name", null);
+      if (name != null) {
+        int colonIdx = name.indexOf(':');
+        if (colonIdx > 0) {
+          ipAddr = name.substring(0, colonIdx);
+          xferPort = Integer.parseInt(name.substring(colonIdx +1));
+        } else {
+          throw new IOException(
+              "Invalid value in server response: name=[" + name + "]");
+        }
+      } else {
+        throw new IOException(
+            "Missing both 'ipAddr' and 'name' in server response.");
+      }
+      // ipAddr is non-null & non-empty string at this point.
+    }
+
+    // Check the validity of xferPort.
+    if (xferPort == -1) {
+      throw new IOException(
+          "Invalid or missing 'xferPort' in server response.");
+    }
+
     // TODO: Fix storageID
     return new DatanodeInfo(
-        (String)m.get("ipAddr"),
+        ipAddr,
         (String)m.get("hostName"),
         (String)m.get("storageID"),
-        (int)(long)(Long)m.get("xferPort"),
+        xferPort,
         (int)(long)(Long)m.get("infoPort"),
         getInt(m, "infoSecurePort", 0),
         (int)(long)(Long)m.get("ipcPort"),
@@ -368,7 +403,8 @@
   }
 
   /** Convert an Object[] to a DatanodeInfo[]. */
-  private static DatanodeInfo[] toDatanodeInfoArray(final Object[] objects) {
+  private static DatanodeInfo[] toDatanodeInfoArray(final Object[] objects)
+      throws IOException {
     if (objects == null) {
       return null;
     } else if (objects.length == 0) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
index f4f7095bcf3..6a5acab3e41 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.server.namenode.INodeId;
 import org.apache.hadoop.util.Time;
@@ -61,7 +62,7 @@ public class TestJsonUtil {
   }
 
   @Test
-  public void testToDatanodeInfoWithoutSecurePort() {
+  public void testToDatanodeInfoWithoutSecurePort() throws Exception {
     Map<String, Object> response = new HashMap<String, Object>();
 
     response.put("ipAddr", "127.0.0.1");
@@ -84,4 +85,63 @@
 
     JsonUtil.toDatanodeInfo(response);
   }
+
+  @Test
+  public void testToDatanodeInfoWithName() throws Exception {
+    Map<String, Object> response = new HashMap<String, Object>();
+
+    // Older servers (1.x, 0.23, etc.) send 'name' instead of ipAddr
+    // and xferPort.
+    String name = "127.0.0.1:1004";
+    response.put("name", name);
+    response.put("hostName", "localhost");
+    response.put("storageID", "fake-id");
+    response.put("infoPort", 1338l);
+    response.put("ipcPort", 1339l);
+    response.put("capacity", 1024l);
+    response.put("dfsUsed", 512l);
+    response.put("remaining", 512l);
+    response.put("blockPoolUsed", 512l);
+    response.put("lastUpdate", 0l);
+    response.put("xceiverCount", 4096l);
+    response.put("networkLocation", "foo.bar.baz");
+    response.put("adminState", "NORMAL");
+    response.put("cacheCapacity", 123l);
+    response.put("cacheUsed", 321l);
+
+    DatanodeInfo di = JsonUtil.toDatanodeInfo(response);
+    Assert.assertEquals(name, di.getXferAddr());
+
+    // The encoded result should contain name, ipAddr and xferPort.
+    Map<String, Object> r = JsonUtil.toJsonMap(di);
+    Assert.assertEquals(name, (String)r.get("name"));
+    Assert.assertEquals("127.0.0.1", (String)r.get("ipAddr"));
+    // In this test, it is Integer instead of Long since json was not actually
+    // involved in constructing the map.
+    Assert.assertEquals(1004, (int)(Integer)r.get("xferPort"));
+
+    // Invalid names
+    String[] badNames = {"127.0.0.1", "127.0.0.1:", ":", "127.0.0.1:sweet", ":123"};
+    for (String badName : badNames) {
+      response.put("name", badName);
+      checkDecodeFailure(response);
+    }
+
+    // Missing both name and ipAddr
+    response.remove("name");
+    checkDecodeFailure(response);
+
+    // Only missing xferPort
+    response.put("ipAddr", "127.0.0.1");
+    checkDecodeFailure(response);
+  }
+
+  private void checkDecodeFailure(Map<String, Object> map) {
+    try {
+      JsonUtil.toDatanodeInfo(map);
+      Assert.fail("Exception not thrown against bad input.");
+    } catch (Exception e) {
+      // expected
+    }
+  }
 }
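
Note on the compatibility rule above: toDatanodeInfo() now accepts either the 2.x-style separate "ipAddr"/"xferPort" fields or the single 1.x/0.23.x-style "name" field of the form ip:xferPort, and rejects responses that provide neither. The following is a minimal, self-contained Java sketch of that parsing rule, given here only for illustration; the class and method names are made up for this note, and unlike the patch it converts a non-numeric port into an IOException rather than letting NumberFormatException propagate.

import java.io.IOException;

// Illustrative sketch only (not part of the patch): split a legacy DatanodeInfo
// "name" value such as "127.0.0.1:1004" into the ipAddr and xferPort fields that
// 2.x WebHDFS code expects, rejecting malformed values.
public class LegacyDatanodeName {

  static final class HostAndPort {
    final String ipAddr;
    final int xferPort;
    HostAndPort(String ipAddr, int xferPort) {
      this.ipAddr = ipAddr;
      this.xferPort = xferPort;
    }
  }

  static HostAndPort parseLegacyName(String name) throws IOException {
    int colonIdx = name.indexOf(':');
    // Reject values like "127.0.0.1", ":1004" and "127.0.0.1:"; a non-empty ip
    // part and a port after the colon are both required.
    if (colonIdx <= 0 || colonIdx == name.length() - 1) {
      throw new IOException(
          "Invalid value in server response: name=[" + name + "]");
    }
    try {
      int xferPort = Integer.parseInt(name.substring(colonIdx + 1));
      return new HostAndPort(name.substring(0, colonIdx), xferPort);
    } catch (NumberFormatException e) {
      // Stricter than the patch, which lets NumberFormatException escape for
      // values such as "127.0.0.1:sweet".
      throw new IOException(
          "Invalid value in server response: name=[" + name + "]", e);
    }
  }

  public static void main(String[] args) throws IOException {
    HostAndPort hp = parseLegacyName("127.0.0.1:1004");
    // Prints: 127.0.0.1 -> xferPort 1004
    System.out.println(hp.ipAddr + " -> xferPort " + hp.xferPort);
  }
}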