HDFS-5325. Remove WebHdfsFileSystem#ConnRunner. Contributed by Haohui Mai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1540235 13f79535-47bb-0310-9956-ffa450edef68
Jing Zhao 2013-11-09 01:19:54 +00:00
parent e28015ed1b
commit 37bc8e0c14
4 changed files with 9 additions and 72 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -514,6 +514,8 @@ Release 2.3.0 - UNRELEASED
     HDFS-5476. Snapshot: clean the blocks/files/directories under a renamed
     file/directory while deletion. (jing9)
 
+    HDFS-5325. Remove WebHdfsFileSystem#ConnRunner. (Haohui Mai via jing9)
+
 Release 2.2.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java

@@ -683,19 +683,6 @@ public class WebHdfsFileSystem extends FileSystem
     }
   }
 
-  @VisibleForTesting
-  final class ConnRunner extends AbstractRunner {
-    protected ConnRunner(final HttpOpParam.Op op, HttpURLConnection conn) {
-      super(op, false);
-      this.conn = conn;
-    }
-
-    @Override
-    protected URL getUrl() {
-      return null;
-    }
-  }
-
   private FsPermission applyUMask(FsPermission permission) {
     if (permission == null) {
       permission = FsPermission.getDefault();
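
Note on the removed hook: WebHDFS CREATE and APPEND are two-step operations. The client first sends the request to the NameNode without a body and, rather than following the redirect, reads the 307 Location header naming a DataNode, then re-sends the request with the data to that DataNode. ConnRunner existed only so tests could push a pre-opened HttpURLConnection through that two-step path (via WebHdfsTestUtil#twoStepWrite below); with it removed, WebHdfsFileSystem drives the exchange entirely through its own runners. A minimal sketch of the protocol itself, with a hypothetical class name, hypothetical NameNode address, and no error handling:

// Sketch only (hypothetical class and parameters): the two-step WebHDFS CREATE
// that the filesystem's runner machinery performs internally.
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class TwoStepWriteSketch {
  static void create(String nnHttpAddr, String path, byte[] data) throws Exception {
    // Step 1: ask the NameNode where to write; expect a 307 redirect to a DataNode.
    URL nnUrl = new URL("http://" + nnHttpAddr + "/webhdfs/v1" + path + "?op=CREATE");
    HttpURLConnection nnConn = (HttpURLConnection) nnUrl.openConnection();
    nnConn.setRequestMethod("PUT");
    nnConn.setInstanceFollowRedirects(false);   // we want the Location header, not the redirected body
    nnConn.connect();
    String location = nnConn.getHeaderField("Location");
    nnConn.disconnect();

    // Step 2: send the file data to the DataNode named in the redirect.
    HttpURLConnection dnConn = (HttpURLConnection) new URL(location).openConnection();
    dnConn.setRequestMethod("PUT");
    dnConn.setDoOutput(true);
    try (OutputStream out = dnConn.getOutputStream()) {
      out.write(data);
    }
    System.out.println("CREATE response: " + dnConn.getResponseCode()); // 201 Created on success
    dnConn.disconnect();
  }
}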

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java

@@ -23,20 +23,14 @@ package org.apache.hadoop.hdfs.security;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
-import java.net.HttpURLConnection;
 import java.net.InetAddress;
 import java.net.NetworkInterface;
-import java.net.URL;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Enumeration;
-import java.util.Map;
-
-import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -47,23 +41,17 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
-import org.apache.hadoop.hdfs.web.resources.DoAsParam;
-import org.apache.hadoop.hdfs.web.resources.ExceptionHandler;
-import org.apache.hadoop.hdfs.web.resources.GetOpParam;
-import org.apache.hadoop.hdfs.web.resources.PostOpParam;
-import org.apache.hadoop.hdfs.web.resources.PutOpParam;
 import org.apache.hadoop.security.TestDoAsEffectiveUser;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
-import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.mockito.internal.util.reflection.Whitebox;
 
 public class TestDelegationTokenForProxyUser {
   private static MiniDFSCluster cluster;
@@ -155,56 +143,26 @@ public class TestDelegationTokenForProxyUser {
     }
   }
 
-  @Test(timeout=20000)
+  @Test(timeout=5000)
   public void testWebHdfsDoAs() throws Exception {
     WebHdfsTestUtil.LOG.info("START: testWebHdfsDoAs()");
-    ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)ExceptionHandler.LOG).getLogger().setLevel(Level.ALL);
     WebHdfsTestUtil.LOG.info("ugi.getShortUserName()=" + ugi.getShortUserName());
     final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, config);
 
     final Path root = new Path("/");
     cluster.getFileSystem().setPermission(root, new FsPermission((short)0777));
 
-    {
-      //test GETHOMEDIRECTORY with doAs
-      final URL url = WebHdfsTestUtil.toUrl(webhdfs,
-          GetOpParam.Op.GETHOMEDIRECTORY, root, new DoAsParam(PROXY_USER));
-      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
-      conn.disconnect();
-
-      final Object responsePath = m.get(Path.class.getSimpleName());
-      WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
-      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
-    }
+    Whitebox.setInternalState(webhdfs, "ugi", proxyUgi);
 
     {
-      //test GETHOMEDIRECTORY with DOas
-      final URL url = WebHdfsTestUtil.toUrl(webhdfs,
-          GetOpParam.Op.GETHOMEDIRECTORY, root, new DoAsParam(PROXY_USER) {
-        @Override
-        public String getName() {
-          return "DOas";
-        }
-      });
-      final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-      final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(conn, HttpServletResponse.SC_OK);
-      conn.disconnect();
-
-      final Object responsePath = m.get(Path.class.getSimpleName());
+      Path responsePath = webhdfs.getHomeDirectory();
       WebHdfsTestUtil.LOG.info("responsePath=" + responsePath);
-      Assert.assertEquals("/user/" + PROXY_USER, responsePath);
+      Assert.assertEquals(webhdfs.getUri() + "/user/" + PROXY_USER, responsePath.toString());
     }
 
     final Path f = new Path("/testWebHdfsDoAs/a.txt");
     {
-      //test create file with doAs
-      final PutOpParam.Op op = PutOpParam.Op.CREATE;
-      final URL url = WebHdfsTestUtil.toUrl(webhdfs, op, f, new DoAsParam(PROXY_USER));
-      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-      conn = WebHdfsTestUtil.twoStepWrite(webhdfs, op, conn);
-      final FSDataOutputStream out = WebHdfsTestUtil.write(webhdfs, op, conn, 4096);
+      FSDataOutputStream out = webhdfs.create(f);
       out.write("Hello, webhdfs user!".getBytes());
       out.close();
@@ -214,12 +172,7 @@ public class TestDelegationTokenForProxyUser {
     }
 
     {
-      //test append file with doAs
-      final PostOpParam.Op op = PostOpParam.Op.APPEND;
-      final URL url = WebHdfsTestUtil.toUrl(webhdfs, op, f, new DoAsParam(PROXY_USER));
-      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-      conn = WebHdfsTestUtil.twoStepWrite(webhdfs, op, conn);
-      final FSDataOutputStream out = WebHdfsTestUtil.write(webhdfs, op, conn, 4096);
+      final FSDataOutputStream out = webhdfs.append(f);
       out.write("\nHello again!".getBytes());
       out.close();
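
Note on the rewritten test: instead of appending a DoAsParam to every hand-built URL, it swaps the WebHdfsFileSystem's internal ugi field to the proxy user with Mockito's Whitebox.setInternalState, so the subsequent FileSystem calls (getHomeDirectory, create, append) are issued as that user. That helper is essentially field reflection; a rough sketch of the idea, with a hypothetical class name and not Mockito's actual implementation:

// Sketch only: overwrite a private field by reflection, roughly what
// Whitebox.setInternalState(target, fieldName, value) does for the test.
import java.lang.reflect.Field;

public class SetInternalStateSketch {
  static void setInternalState(Object target, String fieldName, Object value) throws Exception {
    Field field = null;
    // Walk up the class hierarchy until the named field is found.
    for (Class<?> c = target.getClass(); c != null && field == null; c = c.getSuperclass()) {
      try {
        field = c.getDeclaredField(fieldName);
      } catch (NoSuchFieldException ignored) {
      }
    }
    if (field == null) {
      throw new NoSuchFieldException(fieldName);
    }
    field.setAccessible(true);   // bypass the private modifier
    field.set(target, value);
  }
}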

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java

@@ -78,11 +78,6 @@ public class WebHdfsTestUtil {
     Assert.assertEquals(expectedResponseCode, conn.getResponseCode());
     return WebHdfsFileSystem.jsonParse(conn, false);
   }
 
-  public static HttpURLConnection twoStepWrite(final WebHdfsFileSystem webhdfs,
-      final HttpOpParam.Op op, HttpURLConnection conn) throws IOException {
-    return webhdfs.new ConnRunner(op, conn).twoStepWrite();
-  }
-
   public static FSDataOutputStream write(final WebHdfsFileSystem webhdfs,
       final HttpOpParam.Op op, final HttpURLConnection conn,