diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index f66f58620ac..647d59ef1a8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -382,6 +382,9 @@ Release 2.1.2 - UNRELEASED HDFS-5265. Namenode fails to start when dfs.https.port is unspecified. (Haohui Mai via jing9) + HDFS-5255. Distcp job fails with hsftp when https is enabled in insecure + cluster. (Arpit Agarwal) + Release 2.1.1-beta - 2013-09-23 INCOMPATIBLE CHANGES diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java index dd5e9c6daa0..361f6a0c462 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java @@ -94,7 +94,6 @@ public class HftpFileSystem extends FileSystem private URI hftpURI; protected URI nnUri; - protected URI nnSecureUri; public static final String HFTP_TIMEZONE = "UTC"; public static final String HFTP_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ"; @@ -134,34 +133,33 @@ public class HftpFileSystem extends FileSystem DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT); } - protected int getDefaultSecurePort() { - return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, - DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT); - } - + /** + * We generate the address with one of the following ports, in + * order of preference. + * 1. Port from the hftp URI e.g. hftp://namenode:4000/ will return 4000. + * 2. Port configured via DFS_NAMENODE_HTTP_PORT_KEY + * 3. DFS_NAMENODE_HTTP_PORT_DEFAULT i.e. 50070. 
+ * + * @param uri the user-supplied hftp URI whose authority may override the port + * @return the namenode socket address resolved from the URI + */ protected InetSocketAddress getNamenodeAddr(URI uri) { // use authority so user supplied uri can override port return NetUtils.createSocketAddr(uri.getAuthority(), getDefaultPort()); } - protected InetSocketAddress getNamenodeSecureAddr(URI uri) { - // must only use the host and the configured https port - return NetUtils.createSocketAddrForHost(uri.getHost(), getDefaultSecurePort()); - } - protected URI getNamenodeUri(URI uri) { - return DFSUtil.createUri("http", getNamenodeAddr(uri)); - } - - protected URI getNamenodeSecureUri(URI uri) { - return DFSUtil.createUri("http", getNamenodeSecureAddr(uri)); + return DFSUtil.createUri(getUnderlyingProtocol(), getNamenodeAddr(uri)); } + /** + * See the documentation of {@link #getNamenodeAddr(URI)} for the logic + * behind selecting the canonical service name. + * @return the token service name built from the namenode URI + */ @Override public String getCanonicalServiceName() { - // unlike other filesystems, hftp's service is the secure port, not the - // actual port in the uri - return SecurityUtil.buildTokenService(nnSecureUri).toString(); + return SecurityUtil.buildTokenService(nnUri).toString(); } @Override @@ -187,7 +185,6 @@ public class HftpFileSystem extends FileSystem setConf(conf); this.ugi = UserGroupInformation.getCurrentUser(); this.nnUri = getNamenodeUri(name); - this.nnSecureUri = getNamenodeSecureUri(name); try { this.hftpURI = new URI(name.getScheme(), name.getAuthority(), null, null, null); @@ -225,7 +222,7 @@ public class HftpFileSystem extends FileSystem protected Token selectDelegationToken( UserGroupInformation ugi) { - return hftpTokenSelector.selectToken(nnSecureUri, ugi.getTokens(), getConf()); + return hftpTokenSelector.selectToken(nnUri, ugi.getTokens(), getConf()); } @@ -234,6 +231,13 @@ public class HftpFileSystem extends FileSystem return renewToken; } + /** + * Return the underlying protocol that is used to talk to the namenode. 
+ */ + protected String getUnderlyingProtocol() { + return "http"; + } + @Override public synchronized void setDelegationToken(Token token) { renewToken = token; @@ -257,7 +261,7 @@ public class HftpFileSystem extends FileSystem return ugi.doAs(new PrivilegedExceptionAction>() { @Override public Token run() throws IOException { - final String nnHttpUrl = nnSecureUri.toString(); + final String nnHttpUrl = nnUri.toString(); Credentials c; try { c = DelegationTokenFetcher.getDTfromRemote(nnHttpUrl, renewer); @@ -301,7 +305,7 @@ public class HftpFileSystem extends FileSystem * @throws IOException on error constructing the URL */ protected URL getNamenodeURL(String path, String query) throws IOException { - final URL url = new URL("http", nnUri.getHost(), + final URL url = new URL(getUnderlyingProtocol(), nnUri.getHost(), nnUri.getPort(), path + '?' + query); if (LOG.isTraceEnabled()) { LOG.trace("url=" + url); @@ -703,17 +707,20 @@ public class HftpFileSystem extends FileSystem return true; } + protected String getUnderlyingProtocol() { + return "http"; + } + @SuppressWarnings("unchecked") @Override public long renew(Token token, Configuration conf) throws IOException { // update the kerberos credentials, if they are coming from a keytab UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab(); - // use http to renew the token InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token); return DelegationTokenFetcher.renewDelegationToken - (DFSUtil.createUri("http", serviceAddr).toString(), + (DFSUtil.createUri(getUnderlyingProtocol(), serviceAddr).toString(), (Token) token); } @@ -723,10 +730,9 @@ public class HftpFileSystem extends FileSystem Configuration conf) throws IOException { // update the kerberos credentials, if they are coming from a keytab UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab(); - // use http to cancel the token InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token); 
DelegationTokenFetcher.cancelDelegationToken - (DFSUtil.createUri("http", serviceAddr).toString(), + (DFSUtil.createUri(getUnderlyingProtocol(), serviceAddr).toString(), (Token) token); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java index 6a3bdba593b..5f5c4836953 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HsftpFileSystem.java @@ -68,6 +68,14 @@ public class HsftpFileSystem extends HftpFileSystem { return "hsftp"; } + /** + * Return the underlying protocol that is used to talk to the namenode. + */ + @Override + protected String getUnderlyingProtocol() { + return "https"; + } + @Override public void initialize(URI name, Configuration conf) throws IOException { super.initialize(name, conf); @@ -134,24 +142,15 @@ public class HsftpFileSystem extends HftpFileSystem { @Override protected int getDefaultPort() { - return getDefaultSecurePort(); + return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, + DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT); } - @Override - protected InetSocketAddress getNamenodeSecureAddr(URI uri) { - return getNamenodeAddr(uri); - } - - @Override - protected URI getNamenodeUri(URI uri) { - return getNamenodeSecureUri(uri); - } - @Override protected HttpURLConnection openConnection(String path, String query) throws IOException { query = addDelegationTokenParam(query); - final URL url = new URL("https", nnUri.getHost(), + final URL url = new URL(getUnderlyingProtocol(), nnUri.getHost(), nnUri.getPort(), path + '?' 
+ query); HttpsURLConnection conn; conn = (HttpsURLConnection)connectionFactory.openConnection(url); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java index 5c9d164e2ff..4e0ec8f4550 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java @@ -57,9 +57,14 @@ public class FileChecksumServlets { final String hostname = host instanceof DatanodeInfo ? ((DatanodeInfo)host).getHostName() : host.getIpAddr(); final String scheme = request.getScheme(); - final int port = "https".equals(scheme) - ? (Integer)getServletContext().getAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY) - : host.getInfoPort(); + int port = host.getInfoPort(); + if ("https".equals(scheme)) { + final Integer portObject = (Integer) getServletContext().getAttribute( + DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY); + if (portObject != null) { + port = portObject; + } + } final String encodedPath = ServletUtil.getRawPath(request, "/fileChecksum"); String dtParam = ""; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java index 8beef465fac..fa6391f7fc1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java @@ -61,9 +61,14 @@ public class FileDataServlet extends DfsServlet { } else { hostname = host.getIpAddr(); } - final int port = "https".equals(scheme) - ? 
(Integer)getServletContext().getAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY) - : host.getInfoPort(); + int port = host.getInfoPort(); + if ("https".equals(scheme)) { + final Integer portObject = (Integer) getServletContext().getAttribute( + DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY); + if (portObject != null) { + port = portObject; + } + } String dtParam = ""; if (dt != null) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpDelegationToken.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpDelegationToken.java index 6dd7545c614..5e82baa539c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpDelegationToken.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpDelegationToken.java @@ -88,19 +88,21 @@ public class TestHftpDelegationToken { URI fsUri = URI.create("hftp://localhost"); MyHftpFileSystem fs = (MyHftpFileSystem) FileSystem.newInstance(fsUri, conf); assertEquals(httpPort, fs.getCanonicalUri().getPort()); - checkTokenSelection(fs, httpsPort, conf); // should still use secure port + checkTokenSelection(fs, httpPort, conf); // test with explicit default port + // Make sure it uses the port from the hftp URI. fsUri = URI.create("hftp://localhost:"+httpPort); fs = (MyHftpFileSystem) FileSystem.newInstance(fsUri, conf); assertEquals(httpPort, fs.getCanonicalUri().getPort()); - checkTokenSelection(fs, httpsPort, conf); // should still use secure port + checkTokenSelection(fs, httpPort, conf); // test with non-default port + // Make sure it uses the port from the hftp URI. 
fsUri = URI.create("hftp://localhost:"+(httpPort+1)); fs = (MyHftpFileSystem) FileSystem.newInstance(fsUri, conf); assertEquals(httpPort+1, fs.getCanonicalUri().getPort()); - checkTokenSelection(fs, httpsPort, conf); // should still use secure port + checkTokenSelection(fs, httpPort + 1, conf); conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 5); } @@ -178,7 +180,7 @@ public class TestHftpDelegationToken { } assertNotNull(ex); assertNotNull(ex.getCause()); - assertEquals("Unexpected end of file from server", + assertEquals("Remote host closed connection during handshake", ex.getCause().getMessage()); } finally { t.interrupt(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java index 5213db8431d..93ee402aa9f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java @@ -294,11 +294,13 @@ public class TestHftpFileSystem { HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf); assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT, fs.getDefaultPort()); - assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultSecurePort()); assertEquals(uri, fs.getUri()); + + // HFTP uses http to get the token so canonical service name should + // return the http port. 
assertEquals( - "127.0.0.1:"+DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, + "127.0.0.1:" + DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT, fs.getCanonicalServiceName() ); } @@ -307,17 +309,18 @@ public class TestHftpFileSystem { public void testHftpCustomDefaultPorts() throws IOException { Configuration conf = new Configuration(); conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123); - conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456); URI uri = URI.create("hftp://localhost"); HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf); assertEquals(123, fs.getDefaultPort()); - assertEquals(456, fs.getDefaultSecurePort()); assertEquals(uri, fs.getUri()); + + // HFTP uses http to get the token so canonical service name should + // return the http port. assertEquals( - "127.0.0.1:456", + "127.0.0.1:123", fs.getCanonicalServiceName() ); } @@ -329,11 +332,10 @@ public class TestHftpFileSystem { HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf); assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT, fs.getDefaultPort()); - assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultSecurePort()); assertEquals(uri, fs.getUri()); assertEquals( - "127.0.0.1:"+DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, + "127.0.0.1:123", fs.getCanonicalServiceName() ); } @@ -342,17 +344,15 @@ public class TestHftpFileSystem { public void testHftpCustomUriPortWithCustomDefaultPorts() throws IOException { Configuration conf = new Configuration(); conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123); - conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456); URI uri = URI.create("hftp://localhost:789"); HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf); assertEquals(123, fs.getDefaultPort()); - assertEquals(456, fs.getDefaultSecurePort()); assertEquals(uri, fs.getUri()); assertEquals( - "127.0.0.1:456", + "127.0.0.1:789", fs.getCanonicalServiceName() ); } @@ -366,7 +366,6 @@ public class TestHftpFileSystem { 
HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf); assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultPort()); - assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultSecurePort()); assertEquals(uri, fs.getUri()); assertEquals( @@ -385,7 +384,6 @@ public class TestHftpFileSystem { HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf); assertEquals(456, fs.getDefaultPort()); - assertEquals(456, fs.getDefaultSecurePort()); assertEquals(uri, fs.getUri()); assertEquals( @@ -401,7 +399,6 @@ public class TestHftpFileSystem { HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf); assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultPort()); - assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT, fs.getDefaultSecurePort()); assertEquals(uri, fs.getUri()); assertEquals( @@ -420,7 +417,6 @@ public class TestHftpFileSystem { HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf); assertEquals(456, fs.getDefaultPort()); - assertEquals(456, fs.getDefaultSecurePort()); assertEquals(uri, fs.getUri()); assertEquals(