HDFS-9166. Move hftp / hsftp filesystem to hdfs-client. Contributed by Mingliang Liu.

Haohui Mai 2015-09-29 10:05:58 -07:00
parent c705eab221
commit cf6fb09162
19 changed files with 321 additions and 298 deletions

View File

@ -652,4 +652,15 @@ public class DFSUtilClient {
return URI.create(HdfsConstants.HDFS_URI_SCHEME + "://"
+ namenode.getHostName() + portString);
}
/** Create a URI from the scheme and address */
public static URI createUri(String scheme, InetSocketAddress address) {
try {
return new URI(scheme, null, address.getHostName(), address.getPort(),
null, null, null);
} catch (URISyntaxException ue) {
throw new IllegalArgumentException(ue);
}
}
}
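
As a rough illustration (not part of this commit), the relocated DFSUtilClient.createUri helper can be exercised as below; the NameNode host and port are placeholders:

import java.net.InetSocketAddress;
import java.net.URI;

import org.apache.hadoop.hdfs.DFSUtilClient;

public class CreateUriSketch {
  public static void main(String[] args) {
    // Placeholder NameNode HTTP address.
    InetSocketAddress addr = new InetSocketAddress("nn.example.com", 50070);
    URI uri = DFSUtilClient.createUri("hftp", addr);
    System.out.println(uri);  // expected: hftp://nn.example.com:50070
  }
}

An invalid scheme or host surfaces as the IllegalArgumentException wrapped in the method above, rather than a checked URISyntaxException.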

View File

@ -0,0 +1,253 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.security.token.delegation;
import com.google.common.base.Charsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.DFSUtilClient;
import org.apache.hadoop.hdfs.util.IOUtilsClient;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;
import org.apache.hadoop.hdfs.web.WebHdfsConstants;
import org.apache.hadoop.hdfs.web.resources.DelegationParam;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.token.Token;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URL;
@InterfaceAudience.Private
public class DelegationUtilsClient {
public static final Logger LOG = LoggerFactory.getLogger(
DelegationUtilsClient.class);
public static final String STARTUP_PROGRESS_PATH_SPEC = "/startupProgress";
public static final String GET_DELEGATION_TOKEN_PATH_SPEC = "/getDelegationToken";
public static final String RENEW_DELEGATION_TOKEN_PATH_SPEC = "/renewDelegationToken";
public static final String CANCEL_DELEGATION_TOKEN_PATH_SPEC = "/cancelDelegationToken";
public static final String TOKEN = "token";
public static final String RENEWER = "renewer";
public static final String DELEGATION_PARAMETER_NAME = DelegationParam.NAME;
private static final String SET_DELEGATION = "&" + DELEGATION_PARAMETER_NAME + "=";
static public Credentials getDTfromRemote(URLConnectionFactory factory,
URI nnUri, String renewer, String proxyUser) throws IOException {
StringBuilder buf = new StringBuilder(nnUri.toString())
.append(GET_DELEGATION_TOKEN_PATH_SPEC);
String separator = "?";
if (renewer != null) {
buf.append("?").append(RENEWER).append("=")
.append(renewer);
separator = "&";
}
if (proxyUser != null) {
buf.append(separator).append("doas=").append(proxyUser);
}
boolean isHttps = nnUri.getScheme().equals("https");
HttpURLConnection conn = null;
DataInputStream dis = null;
InetSocketAddress serviceAddr = NetUtils.createSocketAddr(nnUri
.getAuthority());
try {
LOG.debug("Retrieving token from: {}", buf);
conn = run(factory, new URL(buf.toString()));
InputStream in = conn.getInputStream();
Credentials ts = new Credentials();
dis = new DataInputStream(in);
ts.readFields(dis);
for (Token<?> token : ts.getAllTokens()) {
token.setKind(isHttps ? WebHdfsConstants.HSFTP_TOKEN_KIND :
WebHdfsConstants.HFTP_TOKEN_KIND);
SecurityUtil.setTokenService(token, serviceAddr);
}
return ts;
} catch (Exception e) {
throw new IOException("Unable to obtain remote token", e);
} finally {
IOUtilsClient.cleanup(LOG, dis);
if (conn != null) {
conn.disconnect();
}
}
}
/**
* Cancel a Delegation Token.
* @param nnAddr the NameNode's address
* @param tok the token to cancel
* @throws IOException
* @throws AuthenticationException
*/
static public void cancelDelegationToken(URLConnectionFactory factory,
URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
AuthenticationException {
StringBuilder buf = new StringBuilder(nnAddr.toString())
.append(CANCEL_DELEGATION_TOKEN_PATH_SPEC).append("?")
.append(TOKEN).append("=")
.append(tok.encodeToUrlString());
HttpURLConnection conn = run(factory, new URL(buf.toString()));
conn.disconnect();
}
/**
* Renew a Delegation Token.
* @param nnAddr the NameNode's address
* @param tok the token to renew
* @return the Date that the token will expire next.
* @throws IOException
* @throws AuthenticationException
*/
static public long renewDelegationToken(URLConnectionFactory factory,
URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
AuthenticationException {
StringBuilder buf = new StringBuilder(nnAddr.toString())
.append(RENEW_DELEGATION_TOKEN_PATH_SPEC).append("?")
.append(TOKEN).append("=")
.append(tok.encodeToUrlString());
HttpURLConnection connection = null;
BufferedReader in = null;
try {
connection = run(factory, new URL(buf.toString()));
in = new BufferedReader(new InputStreamReader(
connection.getInputStream(), Charsets.UTF_8));
long result = Long.parseLong(in.readLine());
return result;
} catch (IOException ie) {
LOG.info("error in renew over HTTP", ie);
IOException e = getExceptionFromResponse(connection);
if (e != null) {
LOG.info("rethrowing exception from HTTP request: "
+ e.getLocalizedMessage());
throw e;
}
throw ie;
} finally {
IOUtilsClient.cleanup(LOG, in);
if (connection != null) {
connection.disconnect();
}
}
}
// parse the message and extract the name of the exception and the message
static private IOException getExceptionFromResponse(HttpURLConnection con) {
IOException e = null;
String resp;
if(con == null)
return null;
try {
resp = con.getResponseMessage();
} catch (IOException ie) { return null; }
if(resp == null || resp.isEmpty())
return null;
String exceptionClass = "", exceptionMsg = "";
String[] rs = resp.split(";");
if(rs.length < 2)
return null;
exceptionClass = rs[0];
exceptionMsg = rs[1];
LOG.info("Error response from HTTP request=" + resp +
";ec=" + exceptionClass + ";em="+exceptionMsg);
if(exceptionClass == null || exceptionClass.isEmpty())
return null;
// recreate exception objects
try {
Class<? extends Exception> ec =
Class.forName(exceptionClass).asSubclass(Exception.class);
// we are interested in constructor with String arguments
java.lang.reflect.Constructor<? extends Exception> constructor =
ec.getConstructor (new Class[] {String.class});
// create an instance
e = (IOException) constructor.newInstance (exceptionMsg);
} catch (Exception ee) {
LOG.warn("failed to create object of this class", ee);
}
if(e == null)
return null;
e.setStackTrace(new StackTraceElement[0]); // local stack is not relevant
LOG.info("Exception from HTTP response=" + e.getLocalizedMessage());
return e;
}
private static HttpURLConnection run(URLConnectionFactory factory, URL url)
throws IOException, AuthenticationException {
HttpURLConnection conn = null;
try {
conn = (HttpURLConnection) factory.openConnection(url, true);
if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
String msg = conn.getResponseMessage();
throw new IOException("Error when dealing remote token: " + msg);
}
} catch (IOException ie) {
LOG.info("Error when dealing remote token:", ie);
IOException e = getExceptionFromResponse(conn);
if (e != null) {
LOG.info("rethrowing exception from HTTP request: "
+ e.getLocalizedMessage());
throw e;
}
throw ie;
}
return conn;
}
/**
* Returns the url parameter for the given token string.
* @param tokenString
* @return url parameter
*/
public static String getDelegationTokenUrlParam(String tokenString) {
if (tokenString == null ) {
return "";
}
if (UserGroupInformation.isSecurityEnabled()) {
return SET_DELEGATION + tokenString;
} else {
return "";
}
}
}
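
For orientation only (this snippet is not in the patch), the moved helpers can be driven from client code roughly as follows; the NameNode URI and renewer name are assumptions:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public class FetchDelegationTokenSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    URLConnectionFactory factory =
        URLConnectionFactory.newDefaultURLConnectionFactory(conf);
    // Placeholder NameNode web address and renewer principal.
    URI nnUri = URI.create("http://nn.example.com:50070");
    Credentials creds =
        DelegationUtilsClient.getDTfromRemote(factory, nnUri, "yarn", null);
    for (Token<?> t : creds.getAllTokens()) {
      // The token kind is HFTP or HSFTP depending on the URI scheme, as set above.
      System.out.println("Fetched " + t.getKind() + " for service " + t.getService());
    }
  }
}

The same factory and URI pair also feed renewDelegationToken and cancelDelegationToken, which is how HftpFileSystem uses these helpers further down in this commit.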

View File

@ -16,3 +16,5 @@
org.apache.hadoop.hdfs.DistributedFileSystem
org.apache.hadoop.hdfs.web.WebHdfsFileSystem
org.apache.hadoop.hdfs.web.SWebHdfsFileSystem
org.apache.hadoop.hdfs.web.HftpFileSystem
org.apache.hadoop.hdfs.web.HsftpFileSystem
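
Listing the two classes in META-INF/services/org.apache.hadoop.fs.FileSystem lets the FileSystem service loader resolve the hftp:// and hsftp:// schemes from the client jar. A minimal usage sketch, assuming a reachable NameNode (the host and port are placeholders):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HftpSchemeLookupSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Resolves via the service file above; no explicit fs.hftp.impl entry is needed.
    FileSystem fs = FileSystem.get(URI.create("hftp://nn.example.com:50070/"), conf);
    System.out.println(fs.getClass().getName());  // org.apache.hadoop.hdfs.web.HftpFileSystem
    System.out.println(fs.listStatus(new Path("/")).length);
  }
}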

View File

@ -649,6 +649,9 @@ Release 2.8.0 - UNRELEASED
HDFS-9165. Move entries in META-INF/services/o.a.h.fs.FileSystem to
hdfs-client. (Mingliang Liu via wheat9)
HDFS-9166. Move hftp / hsftp filesystem to hdfs-client.
(Mingliang Liu via wheat9)
OPTIMIZATIONS
HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

View File

@ -1079,12 +1079,7 @@ public class DFSUtil {
/** Create a URI from the scheme and address */
public static URI createUri(String scheme, InetSocketAddress address) {
try {
return new URI(scheme, null, address.getHostName(), address.getPort(),
null, null, null);
} catch (URISyntaxException ue) {
throw new IllegalArgumentException(ue);
}
return DFSUtilClient.createUri(scheme, address);
}
/**

View File

@ -38,15 +38,13 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
import org.apache.hadoop.hdfs.web.resources.DelegationParam;
import org.apache.hadoop.hdfs.web.resources.DoAsParam;
import org.apache.hadoop.hdfs.web.resources.UserParam;
import org.apache.hadoop.net.NetUtils;
@ -59,23 +57,10 @@ import org.apache.hadoop.security.authorize.ProxyServers;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.Token;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
@InterfaceAudience.Private
public class JspHelper {
public static final String CURRENT_CONF = "current.conf";
public static final String DELEGATION_PARAMETER_NAME = DelegationParam.NAME;
public static final String NAMENODE_ADDRESS = "nnaddr";
static final String SET_DELEGATION = "&" + DELEGATION_PARAMETER_NAME +
"=";
private static final Log LOG = LogFactory.getLog(JspHelper.class);
/** Private constructor for preventing creating JspHelper object. */
@ -233,7 +218,7 @@ public class JspHelper {
if (UserGroupInformation.isSecurityEnabled()) {
remoteUser = request.getRemoteUser();
final String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
final String tokenString = request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME);
if (tokenString != null) {
// Token-based connections need only verify the effective user, and
// disallow proxying to different user. Proxy authorization checks
@ -352,22 +337,6 @@ public class JspHelper {
return username;
}
/**
* Returns the url parameter for the given token string.
* @param tokenString
* @return url parameter
*/
public static String getDelegationTokenUrlParam(String tokenString) {
if (tokenString == null ) {
return "";
}
if (UserGroupInformation.isSecurityEnabled()) {
return SET_DELEGATION + tokenString;
} else {
return "";
}
}
/**
* Returns the url parameter for the given string, prefixed with
* paramSeparator.

View File

@ -28,6 +28,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
@ -37,9 +38,6 @@ import org.apache.hadoop.security.token.Token;
@SuppressWarnings("serial")
public class CancelDelegationTokenServlet extends DfsServlet {
private static final Log LOG = LogFactory.getLog(CancelDelegationTokenServlet.class);
public static final String PATH_SPEC = "/cancelDelegationToken";
public static final String TOKEN = "token";
@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
throws ServletException, IOException {
@ -57,7 +55,7 @@ public class CancelDelegationTokenServlet extends DfsServlet {
}
final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(
context);
String tokenString = req.getParameter(TOKEN);
String tokenString = req.getParameter(DelegationUtilsClient.TOKEN);
if (tokenString == null) {
resp.sendError(HttpServletResponse.SC_MULTIPLE_CHOICES,
"Token to renew not specified");

View File

@ -34,6 +34,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper;
@ -70,7 +71,7 @@ public class FileChecksumServlets {
String dtParam = "";
if (UserGroupInformation.isSecurityEnabled()) {
String tokenString = ugi.getTokens().iterator().next().encodeToUrlString();
dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
dtParam = DelegationUtilsClient.getDelegationTokenUrlParam(tokenString);
}
String addr = nn.getNameNodeAddressHostPortString();
String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);

View File

@ -32,6 +32,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ServletUtil;
@ -66,7 +67,7 @@ public class FileDataServlet extends DfsServlet {
String dtParam = "";
if (dt != null) {
dtParam = JspHelper.getDelegationTokenUrlParam(dt);
dtParam = DelegationUtilsClient.getDelegationTokenUrlParam(dt);
}
// Add namenode address to the url params
@ -120,7 +121,7 @@ public class FileDataServlet extends DfsServlet {
final String path = ServletUtil.getDecodedPath(request, "/data");
final String encodedPath = ServletUtil.getRawPath(request, "/data");
String delegationToken = request
.getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME);
HdfsFileStatus info = nn.getFileInfo(path);
if (info != null && !info.isDir()) {

View File

@ -29,6 +29,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
@ -38,8 +39,6 @@ import org.apache.hadoop.security.UserGroupInformation;
@SuppressWarnings("serial")
public class GetDelegationTokenServlet extends DfsServlet {
private static final Log LOG = LogFactory.getLog(GetDelegationTokenServlet.class);
public static final String PATH_SPEC = "/getDelegationToken";
public static final String RENEWER = "renewer";
@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
@ -58,7 +57,7 @@ public class GetDelegationTokenServlet extends DfsServlet {
}
LOG.info("Sending token: {" + ugi.getUserName() + "," + req.getRemoteAddr() +"}");
final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
String renewer = req.getParameter(RENEWER);
String renewer = req.getParameter(DelegationUtilsClient.RENEWER);
final String renewerFinal = (renewer == null) ?
req.getUserPrincipal().getName() : renewer;

View File

@ -29,7 +29,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
@ -239,15 +239,16 @@ public class NameNodeHttpServer {
private static void setupServlets(HttpServer2 httpServer, Configuration conf) {
httpServer.addInternalServlet("startupProgress",
StartupProgressServlet.PATH_SPEC, StartupProgressServlet.class);
DelegationUtilsClient.STARTUP_PROGRESS_PATH_SPEC,
StartupProgressServlet.class);
httpServer.addInternalServlet("getDelegationToken",
GetDelegationTokenServlet.PATH_SPEC,
DelegationUtilsClient.GET_DELEGATION_TOKEN_PATH_SPEC,
GetDelegationTokenServlet.class, true);
httpServer.addInternalServlet("renewDelegationToken",
RenewDelegationTokenServlet.PATH_SPEC,
DelegationUtilsClient.RENEW_DELEGATION_TOKEN_PATH_SPEC,
RenewDelegationTokenServlet.class, true);
httpServer.addInternalServlet("cancelDelegationToken",
CancelDelegationTokenServlet.PATH_SPEC,
DelegationUtilsClient.CANCEL_DELEGATION_TOKEN_PATH_SPEC,
CancelDelegationTokenServlet.class, true);
httpServer.addInternalServlet("fsck", "/fsck", FsckServlet.class,
true);
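
Because the servlet registrations now reference the path-spec constants from DelegationUtilsClient, client and server build identical URLs. A small sketch of the client side of that agreement (the address and token string are placeholders, not taken from the patch):

import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;

public class RenewUrlSketch {
  public static void main(String[] args) {
    // Placeholder NameNode web address and encoded token.
    String nnAddr = "http://nn.example.com:50070";
    String renewUrl = nnAddr
        + DelegationUtilsClient.RENEW_DELEGATION_TOKEN_PATH_SPEC
        + "?" + DelegationUtilsClient.TOKEN + "=" + "<encoded-token>";
    // Expected: http://nn.example.com:50070/renewDelegationToken?token=<encoded-token>
    System.out.println(renewUrl);
  }
}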

View File

@ -30,6 +30,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
@ -41,8 +42,6 @@ import com.google.common.base.Charsets;
@SuppressWarnings("serial")
public class RenewDelegationTokenServlet extends DfsServlet {
private static final Log LOG = LogFactory.getLog(RenewDelegationTokenServlet.class);
public static final String PATH_SPEC = "/renewDelegationToken";
public static final String TOKEN = "token";
@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
@ -60,7 +59,7 @@ public class RenewDelegationTokenServlet extends DfsServlet {
return;
}
final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
String tokenString = req.getParameter(TOKEN);
String tokenString = req.getParameter(DelegationUtilsClient.TOKEN);
if (tokenString == null) {
resp.sendError(HttpServletResponse.SC_MULTIPLE_CHOICES,
"Token to renew not specified");

View File

@ -52,8 +52,6 @@ public class StartupProgressServlet extends DfsServlet {
private static final String STEPS = "steps";
private static final String TOTAL = "total";
public static final String PATH_SPEC = "/startupProgress";
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws IOException {

View File

@ -17,17 +17,11 @@
*/
package org.apache.hadoop.hdfs.tools;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URL;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.Date;
@ -43,24 +37,15 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
import org.apache.hadoop.hdfs.server.namenode.CancelDelegationTokenServlet;
import org.apache.hadoop.hdfs.server.namenode.GetDelegationTokenServlet;
import org.apache.hadoop.hdfs.server.namenode.RenewDelegationTokenServlet;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.hdfs.web.WebHdfsConstants;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.GenericOptionsParser;
import com.google.common.base.Charsets;
/**
* Fetch a DelegationToken from the current Namenode and store it in the
* specified file.
@ -186,8 +171,8 @@ public class DelegationTokenFetcher {
} else {
// otherwise we are fetching
if (webUrl != null) {
Credentials creds = getDTfromRemote(connectionFactory, new URI(
webUrl), renewer, null);
Credentials creds = DelegationUtilsClient.getDTfromRemote(
connectionFactory, new URI(webUrl), renewer, null);
creds.writeTokenStorageFile(tokenFile, conf);
for (Token<?> token : creds.getAllTokens()) {
System.out.println("Fetched token via " + webUrl + " for "
@ -208,182 +193,5 @@ public class DelegationTokenFetcher {
}
});
}
static public Credentials getDTfromRemote(URLConnectionFactory factory,
URI nnUri, String renewer, String proxyUser) throws IOException {
StringBuilder buf = new StringBuilder(nnUri.toString())
.append(GetDelegationTokenServlet.PATH_SPEC);
String separator = "?";
if (renewer != null) {
buf.append("?").append(GetDelegationTokenServlet.RENEWER).append("=")
.append(renewer);
separator = "&";
}
if (proxyUser != null) {
buf.append(separator).append("doas=").append(proxyUser);
}
boolean isHttps = nnUri.getScheme().equals("https");
HttpURLConnection conn = null;
DataInputStream dis = null;
InetSocketAddress serviceAddr = NetUtils.createSocketAddr(nnUri
.getAuthority());
try {
if(LOG.isDebugEnabled()) {
LOG.debug("Retrieving token from: " + buf);
}
conn = run(factory, new URL(buf.toString()));
InputStream in = conn.getInputStream();
Credentials ts = new Credentials();
dis = new DataInputStream(in);
ts.readFields(dis);
for (Token<?> token : ts.getAllTokens()) {
token.setKind(isHttps ? WebHdfsConstants.HSFTP_TOKEN_KIND : WebHdfsConstants.HFTP_TOKEN_KIND);
SecurityUtil.setTokenService(token, serviceAddr);
}
return ts;
} catch (Exception e) {
throw new IOException("Unable to obtain remote token", e);
} finally {
IOUtils.cleanup(LOG, dis);
if (conn != null) {
conn.disconnect();
}
}
}
/**
* Cancel a Delegation Token.
* @param nnAddr the NameNode's address
* @param tok the token to cancel
* @throws IOException
* @throws AuthenticationException
*/
static public void cancelDelegationToken(URLConnectionFactory factory,
URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
AuthenticationException {
StringBuilder buf = new StringBuilder(nnAddr.toString())
.append(CancelDelegationTokenServlet.PATH_SPEC).append("?")
.append(CancelDelegationTokenServlet.TOKEN).append("=")
.append(tok.encodeToUrlString());
HttpURLConnection conn = run(factory, new URL(buf.toString()));
conn.disconnect();
}
/**
* Renew a Delegation Token.
* @param nnAddr the NameNode's address
* @param tok the token to renew
* @return the Date that the token will expire next.
* @throws IOException
* @throws AuthenticationException
*/
static public long renewDelegationToken(URLConnectionFactory factory,
URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
AuthenticationException {
StringBuilder buf = new StringBuilder(nnAddr.toString())
.append(RenewDelegationTokenServlet.PATH_SPEC).append("?")
.append(RenewDelegationTokenServlet.TOKEN).append("=")
.append(tok.encodeToUrlString());
HttpURLConnection connection = null;
BufferedReader in = null;
try {
connection = run(factory, new URL(buf.toString()));
in = new BufferedReader(new InputStreamReader(
connection.getInputStream(), Charsets.UTF_8));
long result = Long.parseLong(in.readLine());
return result;
} catch (IOException ie) {
LOG.info("error in renew over HTTP", ie);
IOException e = getExceptionFromResponse(connection);
if (e != null) {
LOG.info("rethrowing exception from HTTP request: "
+ e.getLocalizedMessage());
throw e;
}
throw ie;
} finally {
IOUtils.cleanup(LOG, in);
if (connection != null) {
connection.disconnect();
}
}
}
// parse the message and extract the name of the exception and the message
static private IOException getExceptionFromResponse(HttpURLConnection con) {
IOException e = null;
String resp;
if(con == null)
return null;
try {
resp = con.getResponseMessage();
} catch (IOException ie) { return null; }
if(resp == null || resp.isEmpty())
return null;
String exceptionClass = "", exceptionMsg = "";
String[] rs = resp.split(";");
if(rs.length < 2)
return null;
exceptionClass = rs[0];
exceptionMsg = rs[1];
LOG.info("Error response from HTTP request=" + resp +
";ec=" + exceptionClass + ";em="+exceptionMsg);
if(exceptionClass == null || exceptionClass.isEmpty())
return null;
// recreate exception objects
try {
Class<? extends Exception> ec =
Class.forName(exceptionClass).asSubclass(Exception.class);
// we are interested in constructor with String arguments
java.lang.reflect.Constructor<? extends Exception> constructor =
ec.getConstructor (new Class[] {String.class});
// create an instance
e = (IOException) constructor.newInstance (exceptionMsg);
} catch (Exception ee) {
LOG.warn("failed to create object of this class", ee);
}
if(e == null)
return null;
e.setStackTrace(new StackTraceElement[0]); // local stack is not relevant
LOG.info("Exception from HTTP response=" + e.getLocalizedMessage());
return e;
}
private static HttpURLConnection run(URLConnectionFactory factory, URL url)
throws IOException, AuthenticationException {
HttpURLConnection conn = null;
try {
conn = (HttpURLConnection) factory.openConnection(url, true);
if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
String msg = conn.getResponseMessage();
throw new IOException("Error when dealing remote token: " + msg);
}
} catch (IOException ie) {
LOG.info("Error when dealing remote token:", ie);
IOException e = getExceptionFromResponse(conn);
if (e != null) {
LOG.info("rethrowing exception from HTTP request: "
+ e.getLocalizedMessage());
throw e;
}
throw ie;
}
return conn;
}
}

View File

@ -46,12 +46,10 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DFSUtilClient;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
@ -62,6 +60,8 @@ import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ServletUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
@ -92,6 +92,7 @@ public class HftpFileSystem extends FileSystem
protected URI nnUri;
public static final Logger LOG = LoggerFactory.getLogger(HftpFileSystem.class);
public static final String HFTP_TIMEZONE = "UTC";
public static final String HFTP_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
@ -122,7 +123,7 @@ public class HftpFileSystem extends FileSystem
@Override
protected int getDefaultPort() {
return getConf().getInt(HdfsClientConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY,
DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
HdfsClientConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
}
/**
@ -140,7 +141,7 @@ public class HftpFileSystem extends FileSystem
}
protected URI getNamenodeUri(URI uri) {
return DFSUtil.createUri(getUnderlyingProtocol(), getNamenodeAddr(uri));
return DFSUtilClient.createUri(getUnderlyingProtocol(), getNamenodeAddr(uri));
}
/**
@ -246,7 +247,7 @@ public class HftpFileSystem extends FileSystem
public Token<?> run() throws IOException {
Credentials c;
try {
c = DelegationTokenFetcher.getDTfromRemote(connectionFactory,
c = DelegationUtilsClient.getDTfromRemote(connectionFactory,
nnUri, renewer, proxyUser);
} catch (IOException e) {
if (e.getCause() instanceof ConnectException) {
@ -334,7 +335,7 @@ public class HftpFileSystem extends FileSystem
tokenAspect.ensureTokenInitialized();
if (delegationToken != null) {
tokenString = delegationToken.encodeToUrlString();
return (query + JspHelper.getDelegationTokenUrlParam(tokenString));
return (query + DelegationUtilsClient.getDelegationTokenUrlParam(tokenString));
}
}
}
@ -694,8 +695,8 @@ public class HftpFileSystem extends FileSystem
public Long run() throws Exception {
InetSocketAddress serviceAddr = SecurityUtil
.getTokenServiceAddr(token);
return DelegationTokenFetcher.renewDelegationToken(connectionFactory,
DFSUtil.createUri(getUnderlyingProtocol(), serviceAddr),
return DelegationUtilsClient.renewDelegationToken(connectionFactory,
DFSUtilClient.createUri(getUnderlyingProtocol(), serviceAddr),
(Token<DelegationTokenIdentifier>) token);
}
});
@ -717,8 +718,8 @@ public class HftpFileSystem extends FileSystem
public Void run() throws Exception {
InetSocketAddress serviceAddr = SecurityUtil
.getTokenServiceAddr(token);
DelegationTokenFetcher.cancelDelegationToken(connectionFactory,
DFSUtil.createUri(getUnderlyingProtocol(), serviceAddr),
DelegationUtilsClient.cancelDelegationToken(connectionFactory,
DFSUtilClient.createUri(getUnderlyingProtocol(), serviceAddr),
(Token<DelegationTokenIdentifier>) token);
return null;
}

View File

@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.web;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
/**
@ -63,6 +62,6 @@ public class HsftpFileSystem extends HftpFileSystem {
@Override
protected int getDefaultPort() {
return getConf().getInt(HdfsClientConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
HdfsClientConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
}
}

View File

@ -1,17 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.hadoop.hdfs.web.HftpFileSystem
org.apache.hadoop.hdfs.web.HsftpFileSystem

View File

@ -21,6 +21,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
import org.apache.hadoop.hdfs.web.resources.DoAsParam;
import org.apache.hadoop.hdfs.web.resources.UserParam;
@ -93,7 +94,7 @@ public class TestJspHelper {
Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(
dtId, new DummySecretManager(0, 0, 0, 0));
String tokenString = token.encodeToUrlString();
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
when(request.getRemoteUser()).thenReturn(user);
@ -121,7 +122,7 @@ public class TestJspHelper {
//Set the name.node.address attribute in Servlet context to null
when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
.thenReturn(null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
verifyServiceInToken(context, request, "3.3.3.3:3333");
}
@ -155,7 +156,7 @@ public class TestJspHelper {
// token with no auth-ed user
request = getMockRequest(null, null, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
@ -165,7 +166,7 @@ public class TestJspHelper {
// token with auth-ed user
request = getMockRequest(realUser, null, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
@ -175,7 +176,7 @@ public class TestJspHelper {
// completely different user, token trumps auth
request = getMockRequest("rogue", null, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
@ -185,7 +186,7 @@ public class TestJspHelper {
// expected case
request = getMockRequest(null, user, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
@ -195,7 +196,7 @@ public class TestJspHelper {
// can't proxy with a token!
request = getMockRequest(null, null, "rogue");
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
try {
JspHelper.getUGI(context, request, conf);
@ -208,7 +209,7 @@ public class TestJspHelper {
// can't proxy with a token!
request = getMockRequest(null, user, "rogue");
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
try {
JspHelper.getUGI(context, request, conf);

View File

@ -37,6 +37,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;
import org.apache.hadoop.hdfs.web.WebHdfsConstants;
@ -129,7 +130,7 @@ public class TestDelegationTokenRemoteFetcher {
@Test
public void testTokenRenewFail() throws AuthenticationException {
try {
DelegationTokenFetcher.renewDelegationToken(connectionFactory, serviceUrl, testToken);
DelegationUtilsClient.renewDelegationToken(connectionFactory, serviceUrl, testToken);
fail("Token fetcher shouldn't be able to renew tokens in absense of NN");
} catch (IOException ex) {
}
@ -141,7 +142,7 @@ public class TestDelegationTokenRemoteFetcher {
@Test
public void expectedTokenCancelFail() throws AuthenticationException {
try {
DelegationTokenFetcher.cancelDelegationToken(connectionFactory, serviceUrl, testToken);
DelegationUtilsClient.cancelDelegationToken(connectionFactory, serviceUrl, testToken);
fail("Token fetcher shouldn't be able to cancel tokens in absense of NN");
} catch (IOException ex) {
}
@ -155,7 +156,7 @@ public class TestDelegationTokenRemoteFetcher {
throws AuthenticationException, URISyntaxException {
bootstrap = startHttpServer(httpPort, testToken, serviceUrl);
try {
DelegationTokenFetcher.renewDelegationToken(connectionFactory, new URI(
DelegationUtilsClient.renewDelegationToken(connectionFactory, new URI(
serviceUrl.toString() + "/exception"), createToken(serviceUrl));
fail("Token fetcher shouldn't be able to renew tokens using an invalid"
+ " NN URL");
@ -172,7 +173,7 @@ public class TestDelegationTokenRemoteFetcher {
public void testCancelTokenFromHttp() throws IOException,
AuthenticationException {
bootstrap = startHttpServer(httpPort, testToken, serviceUrl);
DelegationTokenFetcher.cancelDelegationToken(connectionFactory, serviceUrl,
DelegationUtilsClient.cancelDelegationToken(connectionFactory, serviceUrl,
testToken);
if (assertionError != null)
throw assertionError;
@ -186,7 +187,7 @@ public class TestDelegationTokenRemoteFetcher {
NumberFormatException, AuthenticationException {
bootstrap = startHttpServer(httpPort, testToken, serviceUrl);
assertTrue("testRenewTokenFromHttp error",
Long.parseLong(EXP_DATE) == DelegationTokenFetcher.renewDelegationToken(
Long.parseLong(EXP_DATE) == DelegationUtilsClient.renewDelegationToken(
connectionFactory, serviceUrl, testToken));
if (assertionError != null)
throw assertionError;