HDFS-5502. Merge change r1542438 from trunk.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1542439 13f79535-47bb-0310-9956-ffa450edef68
parent 0cab547b2d
commit 53f2a1efe5
@@ -84,6 +84,7 @@ import org.mortbay.jetty.webapp.WebAppContext;
 import org.mortbay.thread.QueuedThreadPool;
 import org.mortbay.util.MultiException;
 
+import com.google.common.base.Preconditions;
 import com.sun.jersey.spi.container.servlet.ServletContainer;
 
 /**
@@ -715,6 +716,19 @@ public class HttpServer implements FilterContainer {
     return webServer.getConnectors()[0].getLocalPort();
   }
 
+  /**
+   * Get the port that corresponds to a particular connector. In the case of
+   * HDFS, the second connector corresponds to the HTTPS connector.
+   *
+   * @return the corresponding port for the connector, or -1 if there's no such
+   *         connector.
+   */
+  public int getConnectorPort(int index) {
+    Preconditions.checkArgument(index >= 0);
+    return index < webServer.getConnectors().length ?
+        webServer.getConnectors()[index].getLocalPort() : -1;
+  }
+
   /**
    * Set the min, max number of worker threads (simultaneous connections).
    */
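Note: a minimal usage sketch of the new accessor (the "server" variable is assumed for illustration). By the convention in the javadoc above, the HTTP connector sits at index 0 and the HTTPS connector, when enabled, at index 1, which is exactly how NameNodeHttpServer uses it further down in this commit:

    // "server" is assumed to be an org.apache.hadoop.http.HttpServer instance.
    int httpPort = server.getConnectorPort(0);   // equivalent to server.getPort()
    int httpsPort = server.getConnectorPort(1);  // -1 when no HTTPS connector exists
    if (httpsPort == -1) {
      // HTTPS was not configured for this server
    }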
@@ -186,6 +186,8 @@ Release 2.3.0 - UNRELEASED
 
     HDFS-5438. Flaws in block report processing can cause data loss. (kihwal)
 
+    HDFS-5502. Fix HTTPS support in HsftpFileSystem. (Haohui Mai via jing9)
+
 Release 2.2.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -793,6 +793,10 @@ public class NameNode implements NameNodeStatusMXBean {
     return httpServer.getHttpAddress();
   }
 
+  public InetSocketAddress getHttpsAddress() {
+    return httpServer.getHttpsAddress();
+  }
+
   /**
    * Verify that configured directories exist, then
    * Interactively confirm that formatting is desired
@@ -119,7 +119,12 @@ public class NameNodeHttpServer {
     httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
     setupServlets(httpServer, conf);
     httpServer.start();
-    httpAddress = new InetSocketAddress(bindAddress.getAddress(), httpServer.getPort());
+    httpAddress = new InetSocketAddress(bindAddress.getAddress(),
+        httpServer.getPort());
+    if (certSSL) {
+      httpsAddress = new InetSocketAddress(bindAddress.getAddress(),
+          httpServer.getConnectorPort(1));
+    }
   }
 
   private Map<String, String> getAuthFilterParams(Configuration conf)
@@ -218,6 +218,8 @@ public class DelegationTokenFetcher {
         .append(renewer);
     }
 
+    boolean isHttps = nnUri.getScheme().equals("https");
+
     HttpURLConnection conn = null;
     DataInputStream dis = null;
     InetSocketAddress serviceAddr = NetUtils.createSocketAddr(nnUri
@@ -234,7 +236,7 @@ public class DelegationTokenFetcher {
       dis = new DataInputStream(in);
       ts.readFields(dis);
       for (Token<?> token : ts.getAllTokens()) {
-        token.setKind(HftpFileSystem.TOKEN_KIND);
+        token.setKind(isHttps ? HsftpFileSystem.TOKEN_KIND : HftpFileSystem.TOKEN_KIND);
         SecurityUtil.setTokenService(token, serviceAddr);
       }
       return ts;
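Note: taken together, the two hunks above make the token kind record the transport the token was fetched over, so the renewer can later reconnect with the right protocol. A condensed sketch of the combined effect, using the identifiers from the method above:

    // Sketch only: the URI scheme decides which kind the fetched tokens carry.
    boolean isHttps = nnUri.getScheme().equals("https");
    for (Token<?> token : ts.getAllTokens()) {
      // "HSFTP delegation" tokens are renewed over HTTPS, "HFTP delegation" over HTTP.
      token.setKind(isHttps ? HsftpFileSystem.TOKEN_KIND : HftpFileSystem.TOKEN_KIND);
      SecurityUtil.setTokenService(token, serviceAddr);
    }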
@@ -86,7 +86,7 @@ public class HftpFileSystem extends FileSystem
     HttpURLConnection.setFollowRedirects(true);
   }
 
-  URLConnectionFactory connectionFactory = URLConnectionFactory.DEFAULT_CONNECTION_FACTORY;
+  URLConnectionFactory connectionFactory;
 
   public static final Text TOKEN_KIND = new Text("HFTP delegation");
 
@@ -98,7 +98,7 @@ public class HftpFileSystem extends FileSystem
   public static final String HFTP_TIMEZONE = "UTC";
   public static final String HFTP_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
 
-  private TokenAspect<HftpFileSystem> tokenAspect = new TokenAspect<HftpFileSystem>(this, TOKEN_KIND);
+  protected TokenAspect<HftpFileSystem> tokenAspect;
   private Token<?> delegationToken;
   private Token<?> renewToken;
 
@@ -172,6 +172,16 @@ public class HftpFileSystem extends FileSystem
     return SCHEME;
   }
 
+  /**
+   * Initialize connectionFactory and tokenAspect. This function is intended to
+   * be overridden by HsftpFileSystem.
+   */
+  protected void initConnectionFactoryAndTokenAspect(Configuration conf)
+      throws IOException {
+    tokenAspect = new TokenAspect<HftpFileSystem>(this, TOKEN_KIND);
+    connectionFactory = URLConnectionFactory.DEFAULT_CONNECTION_FACTORY;
+  }
+
   @Override
   public void initialize(final URI name, final Configuration conf)
       throws IOException {
@@ -179,6 +189,7 @@ public class HftpFileSystem extends FileSystem
     setConf(conf);
     this.ugi = UserGroupInformation.getCurrentUser();
     this.nnUri = getNamenodeUri(name);
+
     try {
       this.hftpURI = new URI(name.getScheme(), name.getAuthority(),
           null, null, null);
@@ -186,6 +197,7 @@ public class HftpFileSystem extends FileSystem
       throw new IllegalArgumentException(e);
     }
 
+    initConnectionFactoryAndTokenAspect(conf);
     if (UserGroupInformation.isSecurityEnabled()) {
       tokenAspect.initDelegationToken(ugi);
     }
@@ -212,8 +224,8 @@ public class HftpFileSystem extends FileSystem
    *
    * For other operations, however, the client has to send a
    * HDFS_DELEGATION_KIND token over the wire so that it can talk to Hadoop
-   * 0.20.3 clusters. Later releases fix this problem. See HDFS-5440 for more
-   * details.
+   * 0.20.203 clusters. Later releases fix this problem. See HDFS-5440 for
+   * more details.
    */
   renewToken = token;
   delegationToken = new Token<T>(token);
@@ -229,13 +241,12 @@ public class HftpFileSystem extends FileSystem
     return ugi.doAs(new PrivilegedExceptionAction<Token<?>>() {
       @Override
       public Token<?> run() throws IOException {
-        final String nnHttpUrl = nnUri.toString();
         Credentials c;
         try {
           c = DelegationTokenFetcher.getDTfromRemote(connectionFactory, nnUri, renewer);
         } catch (IOException e) {
           if (e.getCause() instanceof ConnectException) {
-            LOG.warn("Couldn't connect to " + nnHttpUrl +
+            LOG.warn("Couldn't connect to " + nnUri +
                 ", assuming security is disabled");
             return null;
           }
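Note: the HftpFileSystem hunks above amount to a template-method split: initialize() no longer hard-codes the plain-HTTP defaults but calls an overridable hook, which HsftpFileSystem redefines below. A condensed sketch of the resulting shape (bodies abbreviated, not a verbatim copy of the file):

    // Base class installs HTTP defaults; the HTTPS subclass overrides this hook.
    protected void initConnectionFactoryAndTokenAspect(Configuration conf)
        throws IOException {
      tokenAspect = new TokenAspect<HftpFileSystem>(this, TOKEN_KIND);
      connectionFactory = URLConnectionFactory.DEFAULT_CONNECTION_FACTORY;
    }

    @Override
    public void initialize(final URI name, final Configuration conf)
        throws IOException {
      // ... URI and UGI setup elided ...
      initConnectionFactoryAndTokenAspect(conf);  // hook point for subclasses
      // ... delegation-token setup elided ...
    }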
@@ -18,31 +18,14 @@
 
 package org.apache.hadoop.hdfs.web;
 
-import java.io.FileInputStream;
 import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.net.URL;
-import java.security.KeyStore;
-import java.security.cert.X509Certificate;
-
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.KeyManager;
-import javax.net.ssl.KeyManagerFactory;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLSession;
-import javax.net.ssl.TrustManager;
-import javax.net.ssl.TrustManagerFactory;
-import javax.net.ssl.X509TrustManager;
+import java.security.GeneralSecurityException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.util.Time;
+import org.apache.hadoop.io.Text;
 
 /**
  * An implementation of a protocol for accessing filesystems over HTTPS. The
@@ -55,9 +38,8 @@ import org.apache.hadoop.util.Time;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class HsftpFileSystem extends HftpFileSystem {
-  private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
-  private volatile int ExpWarnDays = 0;
+  public static final Text TOKEN_KIND = new Text("HSFTP delegation");
+  public static final String SCHEME = "hsftp";
 
   /**
    * Return the protocol scheme for the FileSystem.
@@ -67,7 +49,7 @@ public class HsftpFileSystem extends HftpFileSystem {
    */
   @Override
   public String getScheme() {
-    return "hsftp";
+    return SCHEME;
   }
 
   /**
@@ -79,66 +61,17 @@ public class HsftpFileSystem extends HftpFileSystem {
   }
 
   @Override
-  public void initialize(URI name, Configuration conf) throws IOException {
-    super.initialize(name, conf);
-    setupSsl(conf);
-    ExpWarnDays = conf.getInt("ssl.expiration.warn.days", 30);
-  }
-
-  /**
-   * Set up SSL resources
-   *
-   * @throws IOException
-   */
-  private static void setupSsl(Configuration conf) throws IOException {
-    Configuration sslConf = new HdfsConfiguration(false);
-    sslConf.addResource(conf.get(DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
-        DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
-    FileInputStream fis = null;
+  protected void initConnectionFactoryAndTokenAspect(Configuration conf) throws IOException {
+    tokenAspect = new TokenAspect<HftpFileSystem>(this, TOKEN_KIND);
+
+    connectionFactory = new URLConnectionFactory(
+        URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT);
+
     try {
-      SSLContext sc = SSLContext.getInstance("SSL");
-      KeyManager[] kms = null;
-      TrustManager[] tms = null;
-      if (sslConf.get("ssl.client.keystore.location") != null) {
-        // initialize default key manager with keystore file and pass
-        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
-        KeyStore ks = KeyStore.getInstance(sslConf.get(
-            "ssl.client.keystore.type", "JKS"));
-        char[] ksPass = sslConf.get("ssl.client.keystore.password", "changeit")
-            .toCharArray();
-        fis = new FileInputStream(sslConf.get("ssl.client.keystore.location",
-            "keystore.jks"));
-        ks.load(fis, ksPass);
-        kmf.init(ks, sslConf.get("ssl.client.keystore.keypassword", "changeit")
-            .toCharArray());
-        kms = kmf.getKeyManagers();
-        fis.close();
-        fis = null;
-      }
-      // initialize default trust manager with truststore file and pass
-      if (sslConf.getBoolean("ssl.client.do.not.authenticate.server", false)) {
-        // by pass trustmanager validation
-        tms = new DummyTrustManager[] { new DummyTrustManager() };
-      } else {
-        TrustManagerFactory tmf = TrustManagerFactory.getInstance("PKIX");
-        KeyStore ts = KeyStore.getInstance(sslConf.get(
-            "ssl.client.truststore.type", "JKS"));
-        char[] tsPass = sslConf.get("ssl.client.truststore.password",
-            "changeit").toCharArray();
-        fis = new FileInputStream(sslConf.get("ssl.client.truststore.location",
-            "truststore.jks"));
-        ts.load(fis, tsPass);
-        tmf.init(ts);
-        tms = tmf.getTrustManagers();
-      }
-      sc.init(kms, tms, new java.security.SecureRandom());
-      HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
-    } catch (Exception e) {
-      throw new IOException("Could not initialize SSLContext", e);
-    } finally {
-      if (fis != null) {
-        fis.close();
-      }
+      connectionFactory.setConnConfigurator(URLConnectionFactory
+          .newSslConnConfigurator(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+              conf));
+    } catch (GeneralSecurityException e) {
+      throw new IOException(e);
     }
   }
 
@@ -147,70 +80,4 @@ public class HsftpFileSystem extends HftpFileSystem {
     return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
         DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
   }
-
-  @Override
-  protected HttpURLConnection openConnection(String path, String query)
-      throws IOException {
-    query = addDelegationTokenParam(query);
-    final URL url = new URL(getUnderlyingProtocol(), nnUri.getHost(),
-        nnUri.getPort(), path + '?' + query);
-    HttpsURLConnection conn;
-    conn = (HttpsURLConnection)connectionFactory.openConnection(url);
-    // bypass hostname verification
-    conn.setHostnameVerifier(new DummyHostnameVerifier());
-    conn.setRequestMethod("GET");
-    conn.connect();
-
-    // check cert expiration date
-    final int warnDays = ExpWarnDays;
-    if (warnDays > 0) { // make sure only check once
-      ExpWarnDays = 0;
-      long expTimeThreshold = warnDays * MM_SECONDS_PER_DAY + Time.now();
-      X509Certificate[] clientCerts = (X509Certificate[]) conn
-          .getLocalCertificates();
-      if (clientCerts != null) {
-        for (X509Certificate cert : clientCerts) {
-          long expTime = cert.getNotAfter().getTime();
-          if (expTime < expTimeThreshold) {
-            StringBuilder sb = new StringBuilder();
-            sb.append("\n Client certificate "
-                + cert.getSubjectX500Principal().getName());
-            int dayOffSet = (int) ((expTime - Time.now()) / MM_SECONDS_PER_DAY);
-            sb.append(" have " + dayOffSet + " days to expire");
-            LOG.warn(sb.toString());
-          }
-        }
-      }
-    }
-    return (HttpURLConnection) conn;
-  }
-
-  /**
-   * Dummy hostname verifier that is used to bypass hostname checking
-   */
-  protected static class DummyHostnameVerifier implements HostnameVerifier {
-    @Override
-    public boolean verify(String hostname, SSLSession session) {
-      return true;
-    }
-  }
-
-  /**
-   * Dummy trustmanager that is used to trust all server certificates
-   */
-  protected static class DummyTrustManager implements X509TrustManager {
-    @Override
-    public void checkClientTrusted(X509Certificate[] chain, String authType) {
-    }
-
-    @Override
-    public void checkServerTrusted(X509Certificate[] chain, String authType) {
-    }
-
-    @Override
-    public X509Certificate[] getAcceptedIssuers() {
-      return null;
-    }
-  }
-
 }
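Note: the net effect of these two hunks is that HsftpFileSystem no longer ships a trust-all X509TrustManager, a no-op HostnameVerifier, or its own keystore-loading code, and the per-connection client-certificate expiration warning goes away with the openConnection override. Certificate and hostname validation now come from the SSLFactory-backed connection configurator added to URLConnectionFactory below.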
@@ -57,6 +57,7 @@ final class TokenAspect<T extends FileSystem & Renewable> {
     @Override
     public boolean handleKind(Text kind) {
       return kind.equals(HftpFileSystem.TOKEN_KIND)
+          || kind.equals(HsftpFileSystem.TOKEN_KIND)
           || kind.equals(WebHdfsFileSystem.TOKEN_KIND);
     }
 
@@ -75,8 +76,11 @@ final class TokenAspect<T extends FileSystem & Renewable> {
     final InetSocketAddress address = SecurityUtil.getTokenServiceAddr(token);
     Text kind = token.getKind();
     final URI uri;
+
     if (kind.equals(HftpFileSystem.TOKEN_KIND)) {
       uri = DFSUtil.createUri(HftpFileSystem.SCHEME, address);
+    } else if (kind.equals(HsftpFileSystem.TOKEN_KIND)) {
+      uri = DFSUtil.createUri(HsftpFileSystem.SCHEME, address);
     } else if (kind.equals(WebHdfsFileSystem.TOKEN_KIND)) {
       uri = DFSUtil.createUri(WebHdfsFileSystem.SCHEME, address);
     } else {
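Note: the new branches close the renewal path. A delegation token comes back to the renewer only as a (kind, service address) pair, so handleKind must accept the HSFTP kind and the kind-to-scheme mapping must rebuild an hsftp:// URI; without these, hsftp tokens would fall through to the final else and could not be renewed.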
@@ -22,6 +22,11 @@ import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.net.URLConnection;
+import java.security.GeneralSecurityException;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLSocketFactory;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
|
@ -32,6 +37,7 @@ import org.apache.hadoop.security.UserGroupInformation;
|
||||||
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
|
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
|
||||||
import org.apache.hadoop.security.authentication.client.AuthenticationException;
|
import org.apache.hadoop.security.authentication.client.AuthenticationException;
|
||||||
import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
|
import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
|
||||||
|
import org.apache.hadoop.security.ssl.SSLFactory;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Utilities for handling URLs
|
* Utilities for handling URLs
|
||||||
|
@@ -64,6 +70,35 @@ public class URLConnectionFactory {
     }
   };
 
+  /**
+   * Create a new ConnectionConfigurator for SSL connections
+   */
+  static ConnectionConfigurator newSslConnConfigurator(final int timeout,
+      Configuration conf) throws IOException, GeneralSecurityException {
+    final SSLFactory factory;
+    final SSLSocketFactory sf;
+    final HostnameVerifier hv;
+
+    factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
+    factory.init();
+    sf = factory.createSSLSocketFactory();
+    hv = factory.getHostnameVerifier();
+
+    return new ConnectionConfigurator() {
+      @Override
+      public HttpURLConnection configure(HttpURLConnection conn)
+          throws IOException {
+        if (conn instanceof HttpsURLConnection) {
+          HttpsURLConnection c = (HttpsURLConnection) conn;
+          c.setSSLSocketFactory(sf);
+          c.setHostnameVerifier(hv);
+        }
+        URLConnectionFactory.setTimeouts(conn, timeout);
+        return conn;
+      }
+    };
+  }
+
   public URLConnectionFactory(int socketTimeout) {
     this.socketTimeout = socketTimeout;
   }
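Note: a sketch of the client-side wiring, mirroring the HsftpFileSystem hunk earlier in this commit ("conf" and "url" are assumed to be in scope; the exception wrapping matches the diff):

    URLConnectionFactory factory =
        new URLConnectionFactory(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT);
    try {
      factory.setConnConfigurator(URLConnectionFactory.newSslConnConfigurator(
          URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, conf));
    } catch (GeneralSecurityException e) {
      throw new IOException(e);
    }
    // Connections handed out by the factory now validate server certificates
    // and hostnames through Hadoop's SSLFactory instead of trust-all stubs.
    URLConnection conn = factory.openConnection(url);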
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdfs;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Test;
-
-public class TestNameNodeHttpServer {
-
-  @Test
-  public void testSslConfiguration() throws IOException {
-    Configuration conf = new Configuration();
-    conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
-    System.setProperty("jetty.ssl.password", "foo");
-    System.setProperty("jetty.ssl.keypassword", "bar");
-
-    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
-        .build();
-
-    cluster.shutdown();
-  }
-}
@@ -18,233 +18,77 @@
 
 package org.apache.hadoop.hdfs.web;
 
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
-import static org.junit.Assert.*;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
 
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
-import java.net.ServerSocket;
-import java.net.Socket;
+import java.net.HttpURLConnection;
 import java.net.URI;
-import java.security.PrivilegedExceptionAction;
+import java.net.URISyntaxException;
+import java.net.URL;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.web.HftpFileSystem;
-import org.apache.hadoop.hdfs.web.HsftpFileSystem;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.SecurityUtilTestHelper;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.junit.Assert;
 import org.junit.Test;
+import org.mockito.Mockito;
 import org.mockito.internal.util.reflection.Whitebox;
 
 public class TestHftpDelegationToken {
 
+  /**
+   * Test whether HftpFileSystem maintain wire-compatibility for 0.20.203 when
+   * obtaining delegation token. See HDFS-5440 for more details.
+   */
   @Test
-  public void testHdfsDelegationToken() throws Exception {
-    SecurityUtilTestHelper.setTokenServiceUseIp(true);
-
-    final Configuration conf = new Configuration();
-    conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    UserGroupInformation.setConfiguration(conf);
-    UserGroupInformation user =
-        UserGroupInformation.createUserForTesting("oom",
-            new String[]{"memory"});
-    Token<?> token = new Token<TokenIdentifier>
-        (new byte[0], new byte[0],
-         DelegationTokenIdentifier.HDFS_DELEGATION_KIND,
-         new Text("127.0.0.1:8020"));
-    user.addToken(token);
-    Token<?> token2 = new Token<TokenIdentifier>
-        (null, null, new Text("other token"), new Text("127.0.0.1:8021"));
-    user.addToken(token2);
-    assertEquals("wrong tokens in user", 2, user.getTokens().size());
-    FileSystem fs =
-        user.doAs(new PrivilegedExceptionAction<FileSystem>() {
-          @Override
-          public FileSystem run() throws Exception {
-            return FileSystem.get(new URI("hftp://localhost:50470/"), conf);
-          }
-        });
-    assertSame("wrong kind of file system", HftpFileSystem.class,
-        fs.getClass());
-    assertSame("wrong token", token,
-        Whitebox.getInternalState(fs, "renewToken"));
-  }
-
-  @Test
-  public void testSelectHftpDelegationToken() throws Exception {
-    SecurityUtilTestHelper.setTokenServiceUseIp(true);
-
+  public void testTokenCompatibilityFor203() throws IOException,
+      URISyntaxException, AuthenticationException {
     Configuration conf = new Configuration();
-    conf.setClass("fs.hftp.impl", HftpFileSystem.class, FileSystem.class);
-
-    int httpPort = 80;
-    int httpsPort = 443;
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, httpPort);
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, httpsPort);
-
-    // test with implicit default port
-    URI fsUri = URI.create("hftp://localhost");
-    HftpFileSystem fs = (HftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpPort, conf);
-
-    // test with explicit default port
-    // Make sure it uses the port from the hftp URI.
-    fsUri = URI.create("hftp://localhost:"+httpPort);
-    fs = (HftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpPort, conf);
-
-    // test with non-default port
-    // Make sure it uses the port from the hftp URI.
-    fsUri = URI.create("hftp://localhost:"+(httpPort+1));
-    fs = (HftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpPort+1, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpPort + 1, conf);
-
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 5);
-  }
-
-  @Test
-  public void testSelectHsftpDelegationToken() throws Exception {
-    SecurityUtilTestHelper.setTokenServiceUseIp(true);
-
-    Configuration conf = new Configuration();
-    conf.setClass("fs.hsftp.impl", HsftpFileSystem.class, FileSystem.class);
-
-    int httpPort = 80;
-    int httpsPort = 443;
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, httpPort);
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, httpsPort);
-
-    // test with implicit default port
-    URI fsUri = URI.create("hsftp://localhost");
-    HsftpFileSystem fs = (HsftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpsPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpsPort, conf);
-
-    // test with explicit default port
-    fsUri = URI.create("hsftp://localhost:"+httpsPort);
-    fs = (HsftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpsPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpsPort, conf);
-
-    // test with non-default port
-    fsUri = URI.create("hsftp://localhost:"+(httpsPort+1));
-    fs = (HsftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpsPort+1, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpsPort+1, conf);
-
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 5);
-  }
-
-  @Test
-  public void testInsecureRemoteCluster() throws Exception {
-    final ServerSocket socket = new ServerSocket(0); // just reserve a port
-    socket.close();
-    Configuration conf = new Configuration();
-    URI fsUri = URI.create("hsftp://localhost:"+socket.getLocalPort());
-    assertNull(FileSystem.newInstance(fsUri, conf).getDelegationToken(null));
-  }
-
-  @Test
-  public void testSecureClusterError() throws Exception {
-    final ServerSocket socket = new ServerSocket(0);
-    Thread t = new Thread() {
-      @Override
-      public void run() {
-        while (true) { // fetching does a few retries
-          try {
-            Socket s = socket.accept();
-            s.getOutputStream().write(1234);
-            s.shutdownOutput();
-          } catch (Exception e) {
-            break;
-          }
-        }
-      }
-    };
-    t.start();
-
-    try {
-      Configuration conf = new Configuration();
-      URI fsUri = URI.create("hsftp://localhost:"+socket.getLocalPort());
-      Exception ex = null;
-      try {
-        FileSystem.newInstance(fsUri, conf).getDelegationToken(null);
-      } catch (Exception e) {
-        ex = e;
-      }
-      assertNotNull(ex);
-      assertNotNull(ex.getCause());
-      assertEquals("Remote host closed connection during handshake",
-          ex.getCause().getMessage());
-    } finally {
-      t.interrupt();
-    }
-  }
-
-  private void checkTokenSelection(HftpFileSystem fs,
-                                   int port,
-                                   Configuration conf) throws IOException {
-    UserGroupInformation ugi =
-        UserGroupInformation.createUserForTesting(fs.getUri().getAuthority(), new String[]{});
-
-    @SuppressWarnings("unchecked")
-    TokenAspect<HftpFileSystem> aspect = (TokenAspect<HftpFileSystem>) Whitebox.getInternalState(fs, "tokenAspect");
-
-    // use ip-based tokens
-    SecurityUtilTestHelper.setTokenServiceUseIp(true);
-
-    // test fallback to hdfs token
-    Token<?> hdfsToken = new Token<TokenIdentifier>(
-        new byte[0], new byte[0],
-        DelegationTokenIdentifier.HDFS_DELEGATION_KIND,
-        new Text("127.0.0.1:8020"));
-    ugi.addToken(hdfsToken);
-
-    // test fallback to hdfs token
-    Token<?> token = aspect.selectDelegationToken(ugi);
-    assertNotNull(token);
-    assertEquals(hdfsToken, token);
-
-    // test hftp is favored over hdfs
-    Token<?> hftpToken = new Token<TokenIdentifier>(
-        new byte[0], new byte[0],
-        HftpFileSystem.TOKEN_KIND, new Text("127.0.0.1:"+port));
-    ugi.addToken(hftpToken);
-    token = aspect.selectDelegationToken(ugi);
-    assertNotNull(token);
-    assertEquals(hftpToken, token);
-
-    // switch to using host-based tokens, no token should match
-    SecurityUtilTestHelper.setTokenServiceUseIp(false);
-    token = aspect.selectDelegationToken(ugi);
-    assertNull(token);
-
-    // test fallback to hdfs token
-    hdfsToken = new Token<TokenIdentifier>(
-        new byte[0], new byte[0],
-        DelegationTokenIdentifier.HDFS_DELEGATION_KIND,
-        new Text("localhost:8020"));
-    ugi.addToken(hdfsToken);
-    token = aspect.selectDelegationToken(ugi);
-    assertNotNull(token);
-    assertEquals(hdfsToken, token);
-
-    // test hftp is favored over hdfs
-    hftpToken = new Token<TokenIdentifier>(
-        new byte[0], new byte[0],
-        HftpFileSystem.TOKEN_KIND, new Text("localhost:"+port));
-    ugi.addToken(hftpToken);
-    token = aspect.selectDelegationToken(ugi);
-    assertNotNull(token);
-    assertEquals(hftpToken, token);
-  }
+
+    HftpFileSystem fs = new HftpFileSystem();
+
+    Token<?> token = new Token<TokenIdentifier>(new byte[0], new byte[0],
+        DelegationTokenIdentifier.HDFS_DELEGATION_KIND, new Text(
+            "127.0.0.1:8020"));
+    Credentials cred = new Credentials();
+    cred.addToken(HftpFileSystem.TOKEN_KIND, token);
+    ByteArrayOutputStream os = new ByteArrayOutputStream();
+    cred.write(new DataOutputStream(os));
+
+    HttpURLConnection conn = mock(HttpURLConnection.class);
+    doReturn(new ByteArrayInputStream(os.toByteArray())).when(conn)
+        .getInputStream();
+    doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
+
+    URLConnectionFactory factory = mock(URLConnectionFactory.class);
+    doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
+        anyBoolean());
+
+    fs.initialize(new URI("hftp://127.0.0.1:8020"), conf);
+    fs.connectionFactory = factory;
+
+    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("foo",
+        new String[] { "bar" });
+
+    TokenAspect<HftpFileSystem> tokenAspect = new TokenAspect<HftpFileSystem>(
+        fs, HftpFileSystem.TOKEN_KIND);
+
+    tokenAspect.initDelegationToken(ugi);
+    tokenAspect.ensureTokenInitialized();
+
+    Assert.assertSame(HftpFileSystem.TOKEN_KIND, fs.getRenewToken().getKind());
+
+    Token<?> tok = (Token<?>) Whitebox.getInternalState(fs, "delegationToken");
+    Assert.assertNotSame("Not making a copy of the remote token", token, tok);
+    Assert.assertEquals(token.getKind(), tok.getKind());
+  }
 }
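Note: the rewritten test is hermetic. Instead of spinning up sockets and a kerberos-flavored configuration, it serves a serialized Credentials blob from a mocked HttpURLConnection, then asserts that the renewer-facing token carries the HFTP kind while the stored copy keeps the original HDFS_DELEGATION_KIND, which is the 0.20.203 wire-compatibility contract described in HDFS-5440.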
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
+import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
@@ -29,23 +30,22 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLConnection;
-import java.util.Random;
 
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.util.ServletUtil;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -53,8 +53,10 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestHftpFileSystem {
-  private static final Random RAN = new Random();
+  private static final String BASEDIR = System.getProperty("test.build.dir",
+      "target/test-dir") + "/" + TestHftpFileSystem.class.getSimpleName();
+  private static String keystoresDir;
+  private static String sslConfDir;
   private static Configuration config = null;
   private static MiniDFSCluster cluster = null;
   private static String blockPoolId = null;
@@ -83,25 +85,28 @@ public class TestHftpFileSystem {
       new Path("/foo\">bar/foo\">bar"), };
 
   @BeforeClass
-  public static void setUp() throws IOException {
-    ((Log4JLogger) HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);
-
-    final long seed = RAN.nextLong();
-    System.out.println("seed=" + seed);
-    RAN.setSeed(seed);
-
+  public static void setUp() throws Exception {
     config = new Configuration();
     cluster = new MiniDFSCluster.Builder(config).numDataNodes(2).build();
     blockPoolId = cluster.getNamesystem().getBlockPoolId();
     hftpUri = "hftp://"
         + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
+    File base = new File(BASEDIR);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    keystoresDir = new File(BASEDIR).getAbsolutePath();
+    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestHftpFileSystem.class);
+
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, config, false);
   }
 
   @AfterClass
-  public static void tearDown() throws IOException {
+  public static void tearDown() throws Exception {
     if (cluster != null) {
       cluster.shutdown();
     }
+    FileUtil.fullyDelete(new File(BASEDIR));
+    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
   }
 
   @Before
@@ -352,9 +357,12 @@ public class TestHftpFileSystem {
     Configuration conf = new Configuration();
     URI uri = URI.create("hftp://localhost");
     HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
-    URLConnection conn = fs.connectionFactory.openConnection(new URL("http://localhost"));
-    assertEquals(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, conn.getConnectTimeout());
-    assertEquals(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, conn.getReadTimeout());
+    URLConnection conn = fs.connectionFactory.openConnection(new URL(
+        "http://localhost"));
+    assertEquals(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+        conn.getConnectTimeout());
+    assertEquals(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+        conn.getReadTimeout());
   }
 
   // /
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.net.URI;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestHttpsFileSystem {
+  private static final String BASEDIR = System.getProperty("test.build.dir",
+      "target/test-dir") + "/" + TestHttpsFileSystem.class.getSimpleName();
+
+  private static MiniDFSCluster cluster;
+  private static Configuration conf;
+
+  private static String keystoresDir;
+  private static String sslConfDir;
+  private static String nnAddr;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    conf = new Configuration();
+    conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
+    conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
+
+    File base = new File(BASEDIR);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    keystoresDir = new File(BASEDIR).getAbsolutePath();
+    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestHttpsFileSystem.class);
+
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
+
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+    cluster.waitActive();
+    cluster.getFileSystem().create(new Path("/test")).close();
+    InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
+    nnAddr = addr.getHostName() + ":" + addr.getPort();
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    cluster.shutdown();
+    FileUtil.fullyDelete(new File(BASEDIR));
+    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
+  }
+
+  @Test
+  public void testHsftpFileSystem() throws Exception {
+    FileSystem fs = FileSystem.get(new URI("hsftp://" + nnAddr), conf);
+    Assert.assertTrue(fs.exists(new Path("/test")));
+    fs.close();
+  }
+}
@@ -1,26 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<configuration>
-<!-- Turn off SSL server authentication for tests by default -->
-<property>
-  <name>ssl.client.do.not.authenticate.server</name>
-  <value>true</value>
-</property>
-</configuration>