HDFS-5893. HftpFileSystem.RangeHeaderUrlOpener uses the default URLConnectionFactory which does not import SSL certificates. Contributed by Haohui Mai.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1569477 13f79535-47bb-0310-9956-ffa450edef68
commit a845a18c67
parent bbbe808a51
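Note: the fix below is dependency injection of the URLConnectionFactory. A minimal, self-contained Java sketch of the same pattern follows; ConnectionFactory and RangeOpener are hypothetical stand-ins for illustration, not the Hadoop classes touched by this patch.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

interface ConnectionFactory {
  HttpURLConnection openConnection(URL url) throws IOException;
}

class RangeOpener {
  // Injected by the caller, never a shared static default.
  private final ConnectionFactory factory;

  RangeOpener(ConnectionFactory factory) {
    this.factory = factory;
  }

  HttpURLConnection open(URL url, long offset) throws IOException {
    HttpURLConnection conn = factory.openConnection(url);
    if (offset != 0L) {
      // Express the start offset as an HTTP Range header, as the
      // RangeHeaderUrlOpener in this patch does.
      conn.setRequestProperty("Range", "bytes=" + offset + "-");
    }
    return conn;
  }
}

With the factory supplied by the caller, production code can pass a factory built from the cluster's SSL configuration while tests pass a mock, which is exactly the shape of the changes below.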
@@ -435,6 +435,10 @@ Release 2.4.0 - UNRELEASED
 
     HDFS-5803. TestBalancer.testBalancer0 fails. (Chen He via kihwal)
 
+    HDFS-5893. HftpFileSystem.RangeHeaderUrlOpener uses the default
+    URLConnectionFactory which does not import SSL certificates. (Haohui Mai via
+    jing9)
+
   BREAKDOWN OF HDFS-5698 SUBTASKS AND RELATED JIRAS
 
     HDFS-5717. Save FSImage header in protobuf. (Haohui Mai via jing9)
@@ -27,7 +27,6 @@ import javax.servlet.http.HttpServletResponse;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -61,18 +60,13 @@ public class FileDataServlet extends DfsServlet {
     } else {
       hostname = host.getIpAddr();
     }
-    int port = host.getInfoPort();
-    if ("https".equals(scheme)) {
-      final Integer portObject = (Integer) getServletContext().getAttribute(
-          DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
-      if (portObject != null) {
-        port = portObject;
-      }
-    }
+
+    int port = "https".equals(scheme) ? host.getInfoSecurePort() : host
+        .getInfoPort();
 
     String dtParam = "";
     if (dt != null) {
-      dtParam=JspHelper.getDelegationTokenUrlParam(dt);
+      dtParam = JspHelper.getDelegationTokenUrlParam(dt);
     }
 
     // Add namenode address to the url params
@@ -344,14 +344,15 @@ public class HftpFileSystem extends FileSystem
   }
 
   static class RangeHeaderUrlOpener extends ByteRangeInputStream.URLOpener {
-    URLConnectionFactory connectionFactory = URLConnectionFactory.DEFAULT_SYSTEM_CONNECTION_FACTORY;
+    private final URLConnectionFactory connFactory;
 
-    RangeHeaderUrlOpener(final URL url) {
+    RangeHeaderUrlOpener(URLConnectionFactory connFactory, final URL url) {
       super(url);
+      this.connFactory = connFactory;
     }
 
     protected HttpURLConnection openConnection() throws IOException {
-      return (HttpURLConnection)connectionFactory.openConnection(url);
+      return (HttpURLConnection)connFactory.openConnection(url);
     }
 
     /** Use HTTP Range header for specifying offset. */
@@ -381,8 +382,9 @@ public class HftpFileSystem extends FileSystem
       super(o, r);
     }
 
-    RangeHeaderInputStream(final URL url) {
-      this(new RangeHeaderUrlOpener(url), new RangeHeaderUrlOpener(null));
+    RangeHeaderInputStream(URLConnectionFactory connFactory, final URL url) {
+      this(new RangeHeaderUrlOpener(connFactory, url),
+          new RangeHeaderUrlOpener(connFactory, null));
     }
 
     @Override
@@ -397,7 +399,7 @@ public class HftpFileSystem extends FileSystem
     String path = "/data" + ServletUtil.encodePath(f.toUri().getPath());
     String query = addDelegationTokenParam("ugi=" + getEncodedUgiParameter());
     URL u = getNamenodeURL(path, query);
-    return new FSDataInputStream(new RangeHeaderInputStream(u));
+    return new FSDataInputStream(new RangeHeaderInputStream(connectionFactory, u));
   }
 
   @Override
@@ -97,12 +97,13 @@ public static class MockHttpURLConnection extends HttpURLConnection {
 
   @Test
   public void testByteRange() throws IOException {
+    URLConnectionFactory factory = mock(URLConnectionFactory.class);
     HftpFileSystem.RangeHeaderUrlOpener ospy = spy(
-        new HftpFileSystem.RangeHeaderUrlOpener(new URL("http://test/")));
+        new HftpFileSystem.RangeHeaderUrlOpener(factory, new URL("http://test/")));
     doReturn(new MockHttpURLConnection(ospy.getURL())).when(ospy)
         .openConnection();
     HftpFileSystem.RangeHeaderUrlOpener rspy = spy(
-        new HftpFileSystem.RangeHeaderUrlOpener((URL) null));
+        new HftpFileSystem.RangeHeaderUrlOpener(factory, (URL) null));
     doReturn(new MockHttpURLConnection(rspy.getURL())).when(rspy)
         .openConnection();
     ByteRangeInputStream is = new HftpFileSystem.RangeHeaderInputStream(ospy, rspy);
@@ -171,12 +172,15 @@ public static class MockHttpURLConnection extends HttpURLConnection {
       assertEquals("Should fail because incorrect response code was sent",
                    "HTTP_OK expected, received 206", e.getMessage());
     }
+    is.close();
   }
 
   @Test
   public void testPropagatedClose() throws IOException {
-    ByteRangeInputStream brs = spy(
-        new HftpFileSystem.RangeHeaderInputStream(new URL("http://test/")));
+    URLConnectionFactory factory = mock(URLConnectionFactory.class);
+
+    ByteRangeInputStream brs = spy(new HftpFileSystem.RangeHeaderInputStream(
+        factory, new URL("http://test/")));
 
     InputStream mockStream = mock(InputStream.class);
     doReturn(mockStream).when(brs).openInputStream();
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.web;
 
 import java.io.File;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.net.InetSocketAddress;
 import java.net.URI;
 
@@ -30,6 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -65,9 +67,11 @@ public class TestHttpsFileSystem {
 
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
     cluster.waitActive();
-    cluster.getFileSystem().create(new Path("/test")).close();
+    OutputStream os = cluster.getFileSystem().create(new Path("/test"));
+    os.write(23);
+    os.close();
     InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
-    nnAddr = addr.getHostName() + ":" + addr.getPort();
+    nnAddr = NetUtils.getHostPortString(addr);
     conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
   }
 
|
@ -82,6 +86,9 @@ public class TestHttpsFileSystem {
|
||||||
public void testHsftpFileSystem() throws Exception {
|
public void testHsftpFileSystem() throws Exception {
|
||||||
FileSystem fs = FileSystem.get(new URI("hsftp://" + nnAddr), conf);
|
FileSystem fs = FileSystem.get(new URI("hsftp://" + nnAddr), conf);
|
||||||
Assert.assertTrue(fs.exists(new Path("/test")));
|
Assert.assertTrue(fs.exists(new Path("/test")));
|
||||||
|
InputStream is = fs.open(new Path("/test"));
|
||||||
|
Assert.assertEquals(23, is.read());
|
||||||
|
is.close();
|
||||||
fs.close();
|
fs.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
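The updated tests above mock the injected factory rather than touching the network. A comparable JUnit 4 + Mockito sketch against the hypothetical RangeOpener from the earlier snippet (illustrative only, not the patch's own test code):

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.net.HttpURLConnection;
import java.net.URL;

import org.junit.Test;

public class RangeOpenerTest {
  @Test
  public void setsRangeHeaderThroughInjectedFactory() throws Exception {
    // The factory is a mock, so no network, keystore, or SSL setup is needed.
    ConnectionFactory factory = mock(ConnectionFactory.class);
    HttpURLConnection conn = mock(HttpURLConnection.class);
    URL url = new URL("http://test/");
    when(factory.openConnection(url)).thenReturn(conn);

    new RangeOpener(factory).open(url, 100L);

    // The opener consulted the injected factory (not a global default)
    // and expressed the offset as an HTTP Range header.
    verify(factory).openConnection(url);
    verify(conn).setRequestProperty("Range", "bytes=100-");
  }
}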