HDFS-4983. Merge change r1548968 from trunk.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1548972 13f79535-47bb-0310-9956-ffa450edef68
parent 9368cc0dbb
commit 1c1645d17f
CHANGES.txt
@@ -147,6 +147,9 @@ Release 2.4.0 - UNRELEASED
     HDFS-5633. Improve OfflineImageViewer to use less memory. (jing9)
 
+    HDFS-4983. Numeric usernames do not work with WebHDFS FS. (Yongjun Zhang via
+    jing9)
+
   OPTIMIZATIONS
 
     HDFS-5239. Allow FSNamesystem lock fairness to be configurable (daryn)
 
DFSConfigKeys.java
@@ -157,6 +157,8 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final int DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_DEFAULT = 4;
   public static final String DFS_WEBHDFS_ENABLED_KEY = "dfs.webhdfs.enabled";
   public static final boolean DFS_WEBHDFS_ENABLED_DEFAULT = true;
+  public static final String DFS_WEBHDFS_USER_PATTERN_KEY = "dfs.webhdfs.user.provider.user.pattern";
+  public static final String DFS_WEBHDFS_USER_PATTERN_DEFAULT = "^[A-Za-z_][A-Za-z0-9._-]*[$]?$";
   public static final String DFS_PERMISSIONS_ENABLED_KEY = "dfs.permissions.enabled";
   public static final boolean DFS_PERMISSIONS_ENABLED_DEFAULT = true;
   public static final String DFS_PERMISSIONS_SUPERUSERGROUP_KEY = "dfs.permissions.superusergroup";
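The default pattern above is the root of the bug in the commit title: its first character class only allows letters and underscore, so a purely numeric username such as "123" is rejected before any WebHDFS request is made. A minimal standalone sketch of the difference follows; the class name UserPatternSketch is hypothetical, and the relaxed pattern is the one the new tests use, not a shipped default.

import java.util.regex.Pattern;

// Sketch, not part of this change: compare the shipped default pattern with a
// relaxed pattern whose first character class also allows digits.
public class UserPatternSketch {
  public static void main(String[] args) {
    Pattern dflt    = Pattern.compile("^[A-Za-z_][A-Za-z0-9._-]*[$]?$");
    Pattern relaxed = Pattern.compile("^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");
    System.out.println(dflt.matcher("123").matches());     // false: numeric username rejected
    System.out.println(relaxed.matcher("123").matches());  // true: accepted
    System.out.println(dflt.matcher("hdfs").matches());    // true under either pattern
  }
}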
NameNodeHttpServer.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
 import org.apache.hadoop.hdfs.web.AuthFilter;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.Param;
+import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
@@ -82,7 +83,10 @@ public class NameNodeHttpServer {
         .setKeytabConfKey(DFSUtil.getSpnegoKeytabKey(conf,
             DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY)).build();
     if (WebHdfsFileSystem.isEnabled(conf, HttpServer.LOG)) {
-      //add SPNEGO authentication filter for webhdfs
+      // set user pattern based on configuration file
+      UserParam.setUserPattern(conf.get(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));
+
+      // add SPNEGO authentication filter for webhdfs
       final String name = "SPNEGO";
       final String classname = AuthFilter.class.getName();
       final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
WebHdfsFileSystem.java
@@ -167,6 +167,8 @@ public class WebHdfsFileSystem extends FileSystem
       ) throws IOException {
     super.initialize(uri, conf);
     setConf(conf);
+    /** set user pattern based on configuration file */
+    UserParam.setUserPattern(conf.get(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));
     initializeTokenAspect();
     initializeConnectionFactory(conf);
 
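Note that the configured pattern is now applied in two places: on the NameNode before the SPNEGO filter is registered, and on the client in WebHdfsFileSystem.initialize(). A numeric username therefore has to be acceptable on both sides. Below is a minimal sketch of the client-side override; the class name WebHdfsClientSketch and the address namenode:50070 are placeholders, not part of the commit.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DFSConfigKeys;

// Sketch, not part of this change: relax the pattern on the client so that
// WebHdfsFileSystem.initialize() accepts a username that starts with a digit.
public class WebHdfsClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY,
        "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");
    // "namenode:50070" is a placeholder NameNode HTTP address.
    FileSystem fs = FileSystem.get(URI.create("webhdfs://namenode:50070/"), conf);
    System.out.println(fs.getUri());
  }
}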
UserParam.java
@@ -17,8 +17,10 @@
  */
 package org.apache.hadoop.hdfs.web.resources;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT;
 import org.apache.hadoop.security.UserGroupInformation;
+import com.google.common.annotations.VisibleForTesting;
 
 import java.text.MessageFormat;
 import java.util.regex.Pattern;
 
@@ -29,8 +31,21 @@ public class UserParam extends StringParam {
   /** Default parameter value. */
   public static final String DEFAULT = "";
 
-  private static final Domain DOMAIN = new Domain(NAME,
-    Pattern.compile("^[A-Za-z_][A-Za-z0-9._-]*[$]?$"));
+  private static Domain domain = new Domain(NAME, Pattern.compile(DFS_WEBHDFS_USER_PATTERN_DEFAULT));
+
+  @VisibleForTesting
+  public static Domain getUserPatternDomain() {
+    return domain;
+  }
+
+  @VisibleForTesting
+  public static void setUserPatternDomain(Domain dm) {
+    domain = dm;
+  }
+
+  public static void setUserPattern(String pattern) {
+    domain = new Domain(NAME, Pattern.compile(pattern));
+  }
 
   private static String validateLength(String str) {
     if (str == null) {
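The compiled pattern moves from an immutable DOMAIN constant to a mutable static domain so it can be re-read from configuration; the @VisibleForTesting accessors let tests save and restore the shared state. Because the field is static, changing the pattern affects every UserParam created afterwards in the process. A short sketch of that behaviour follows; the class name UserParamPatternSketch is hypothetical.

import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.web.resources.UserParam;

// Sketch, not part of this change: the active pattern is process-wide, so a
// caller that changes it should put the previous pattern back when done.
public class UserParamPatternSketch {
  public static void main(String[] args) {
    UserParam.setUserPattern("^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");
    System.out.println(new UserParam("123").getValue());  // accepted: prints 123
    // Restore the shipped default, mirroring what the TestParam case below does.
    UserParam.setUserPattern(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT);
  }
}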
@@ -50,7 +65,7 @@ public class UserParam extends StringParam {
    * @param str a string representation of the parameter value.
    */
   public UserParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT)? null : validateLength(str));
+    super(domain, str == null || str.equals(DEFAULT)? null : validateLength(str));
   }
 
   /**
@@ -64,4 +79,4 @@ public class UserParam extends StringParam {
   public String getName() {
     return NAME;
   }
 }
hdfs-default.xml
@@ -1484,4 +1484,12 @@
   </description>
 </property>
 
+<property>
+  <name>dfs.webhdfs.user.provider.user.pattern</name>
+  <value>^[A-Za-z_][A-Za-z0-9._-]*[$]?$</value>
+  <description>
+    Valid pattern for user and group names for webhdfs, it must be a valid java regex.
+  </description>
+</property>
+
 </configuration>
TestWebHDFS.java
@@ -261,6 +261,34 @@ public class TestWebHDFS {
     }
   }
 
+  @Test(timeout=300000)
+  public void testNumericalUserName() throws Exception {
+    final Configuration conf = WebHdfsTestUtil.createConf();
+    conf.set(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");
+    final MiniDFSCluster cluster =
+        new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+    try {
+      cluster.waitActive();
+      WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME)
+          .setPermission(new Path("/"),
+              new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
+
+      UserGroupInformation.createUserForTesting("123", new String[]{"my-group"})
+          .doAs(new PrivilegedExceptionAction<Void>() {
+            @Override
+            public Void run() throws IOException, URISyntaxException {
+              FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
+                  WebHdfsFileSystem.SCHEME);
+              Path d = new Path("/my-dir");
+              Assert.assertTrue(fs.mkdirs(d));
+              return null;
+            }
+          });
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
   /**
    * WebHdfs should be enabled by default after HDFS-5532
    *
TestParam.java
@@ -285,4 +285,19 @@ public class TestParam {
       Assert.assertEquals(expected, computed.getValue());
     }
   }
+
+  @Test
+  public void testUserNameOkAfterResettingPattern() {
+    UserParam.Domain oldDomain = UserParam.getUserPatternDomain();
+
+    String newPattern = "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$";
+    UserParam.setUserPattern(newPattern);
+
+    UserParam userParam = new UserParam("1x");
+    assertNotNull(userParam.getValue());
+    userParam = new UserParam("123");
+    assertNotNull(userParam.getValue());
+
+    UserParam.setUserPatternDomain(oldDomain);
+  }
 }