HDFS-4983. Numeric usernames do not work with WebHDFS FS. Contributed by Yongjun Zhang.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1548968 13f79535-47bb-0310-9956-ffa450edef68
Jing Zhao committed 2013-12-07 21:10:53 +00:00
parent 4c87a27ad8
commit 91d0b47270
8 changed files with 83 additions and 6 deletions


@@ -584,6 +584,9 @@ Release 2.4.0 - UNRELEASED
    HDFS-5633. Improve OfflineImageViewer to use less memory. (jing9)

+    HDFS-4983. Numeric usernames do not work with WebHDFS FS. (Yongjun Zhang via
+    jing9)
+
  OPTIMIZATIONS

    HDFS-5239. Allow FSNamesystem lock fairness to be configurable (daryn)


@@ -164,6 +164,8 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
  public static final int DFS_NAMENODE_REPLICATION_STREAMS_HARD_LIMIT_DEFAULT = 4;
  public static final String DFS_WEBHDFS_ENABLED_KEY = "dfs.webhdfs.enabled";
  public static final boolean DFS_WEBHDFS_ENABLED_DEFAULT = true;
+  public static final String DFS_WEBHDFS_USER_PATTERN_KEY = "dfs.webhdfs.user.provider.user.pattern";
+  public static final String DFS_WEBHDFS_USER_PATTERN_DEFAULT = "^[A-Za-z_][A-Za-z0-9._-]*[$]?$";
  public static final String DFS_PERMISSIONS_ENABLED_KEY = "dfs.permissions.enabled";
  public static final boolean DFS_PERMISSIONS_ENABLED_DEFAULT = true;
  public static final String DFS_PERMISSIONS_SUPERUSERGROUP_KEY = "dfs.permissions.superusergroup";

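A minimal sketch, not part of this commit, of how the two new DFSConfigKeys constants are meant to be used: a deployment overrides dfs.webhdfs.user.provider.user.pattern with a regex that also accepts a leading digit, and the code reads the configured value with the shipped default as a fallback. The class name WebHdfsUserPatternExample is hypothetical; the relaxed regex is the one used in the tests further down.

import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;

public class WebHdfsUserPatternExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Equivalent to setting dfs.webhdfs.user.provider.user.pattern in hdfs-site.xml;
    // this variant allows the first character to be a digit.
    conf.set(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY,
        "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");

    // Read the configured pattern, falling back to the shipped default,
    // the same lookup the NameNode and WebHdfsFileSystem perform below.
    String pattern = conf.get(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY,
        DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT);

    System.out.println("123  -> " + Pattern.matches(pattern, "123"));   // true
    System.out.println("-bad -> " + Pattern.matches(pattern, "-bad"));  // false
  }
}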

@@ -39,6 +39,7 @@
import org.apache.hadoop.hdfs.web.AuthFilter;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.hdfs.web.resources.Param;
+import org.apache.hadoop.hdfs.web.resources.UserParam;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.net.NetUtils;

@@ -73,7 +74,10 @@ public class NameNodeHttpServer {
  private void initWebHdfs(Configuration conf) throws IOException {
    if (WebHdfsFileSystem.isEnabled(conf, HttpServer.LOG)) {
-      //add SPNEGO authentication filter for webhdfs
+      // set user pattern based on configuration file
+      UserParam.setUserPattern(conf.get(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));
+      // add SPNEGO authentication filter for webhdfs
      final String name = "SPNEGO";
      final String classname = AuthFilter.class.getName();
      final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";


@@ -157,6 +157,8 @@ public synchronized void initialize(URI uri, Configuration conf
      ) throws IOException {
    super.initialize(uri, conf);
    setConf(conf);
+    /** set user pattern based on configuration file */
+    UserParam.setUserPattern(conf.get(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));
    connectionFactory = URLConnectionFactory
        .newDefaultURLConnectionFactory(conf);
    initializeTokenAspect();


@@ -17,7 +17,9 @@
 */
package org.apache.hadoop.hdfs.web.resources;

+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT;
import org.apache.hadoop.security.UserGroupInformation;
+import com.google.common.annotations.VisibleForTesting;

import java.text.MessageFormat;
import java.util.regex.Pattern;

@@ -29,8 +31,21 @@ public class UserParam extends StringParam {
  /** Default parameter value. */
  public static final String DEFAULT = "";

-  private static final Domain DOMAIN = new Domain(NAME,
-      Pattern.compile("^[A-Za-z_][A-Za-z0-9._-]*[$]?$"));
+  private static Domain domain = new Domain(NAME, Pattern.compile(DFS_WEBHDFS_USER_PATTERN_DEFAULT));
+
+  @VisibleForTesting
+  public static Domain getUserPatternDomain() {
+    return domain;
+  }
+
+  @VisibleForTesting
+  public static void setUserPatternDomain(Domain dm) {
+    domain = dm;
+  }
+
+  public static void setUserPattern(String pattern) {
+    domain = new Domain(NAME, Pattern.compile(pattern));
+  }

  private static String validateLength(String str) {
    if (str == null) {

@@ -50,7 +65,7 @@ private static String validateLength(String str) {
   * @param str a string representation of the parameter value.
   */
  public UserParam(final String str) {
-    super(DOMAIN, str == null || str.equals(DEFAULT)? null : validateLength(str));
+    super(domain, str == null || str.equals(DEFAULT)? null : validateLength(str));
  }

  /**

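A minimal sketch, not part of this commit, of what the UserParam change buys: under the shipped default pattern a purely numeric name is rejected, and after UserParam.setUserPattern(...) with a relaxed regex it is accepted. It assumes the underlying StringParam.Domain rejects non-matching values with an IllegalArgumentException; the class name UserPatternSketch is hypothetical.

import org.apache.hadoop.hdfs.web.resources.UserParam;

public class UserPatternSketch {
  public static void main(String[] args) {
    try {
      // Default pattern requires the first character to be a letter or '_'.
      new UserParam("123");
      System.out.println("accepted under the default pattern");
    } catch (IllegalArgumentException e) {
      System.out.println("rejected under the default pattern: " + e.getMessage());
    }

    // Relax the pattern so a leading digit is allowed, as the tests below do.
    UserParam.setUserPattern("^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");
    System.out.println("value = " + new UserParam("123").getValue()); // prints value = 123
  }
}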

@@ -1593,4 +1593,12 @@
  </description>
</property>

+<property>
+  <name>dfs.webhdfs.user.provider.user.pattern</name>
+  <value>^[A-Za-z_][A-Za-z0-9._-]*[$]?$</value>
+  <description>
+    Valid pattern for user and group names for webhdfs, it must be a valid java regex.
+  </description>
+</property>
+
</configuration>

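The description above notes that the property value must be a valid Java regex. A minimal sketch, not part of this commit, of checking a candidate value before putting it into hdfs-site.xml: Pattern.compile() is the same call UserParam.setUserPattern() runs, so a PatternSyntaxException here is the same failure the server-side code would hit. The class name UserPatternCheck is hypothetical.

import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

public class UserPatternCheck {
  public static void main(String[] args) {
    // Candidate value for dfs.webhdfs.user.provider.user.pattern.
    String candidate = args.length > 0 ? args[0] : "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$";
    try {
      Pattern.compile(candidate);
      System.out.println("valid java regex: " + candidate);
    } catch (PatternSyntaxException e) {
      System.out.println("not a valid java regex: " + e.getMessage());
    }
  }
}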

@@ -261,6 +261,34 @@ public Void run() throws IOException, URISyntaxException {
      }
    }

+  @Test(timeout=300000)
+  public void testNumericalUserName() throws Exception {
+    final Configuration conf = WebHdfsTestUtil.createConf();
+    conf.set(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");
+    final MiniDFSCluster cluster =
+        new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+    try {
+      cluster.waitActive();
+      WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME)
+          .setPermission(new Path("/"),
+              new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
+      UserGroupInformation.createUserForTesting("123", new String[]{"my-group"})
+          .doAs(new PrivilegedExceptionAction<Void>() {
+            @Override
+            public Void run() throws IOException, URISyntaxException {
+              FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
+                  WebHdfsFileSystem.SCHEME);
+              Path d = new Path("/my-dir");
+              Assert.assertTrue(fs.mkdirs(d));
+              return null;
+            }
+          });
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
  /**
   * WebHdfs should be enabled by default after HDFS-5532
   *


@@ -285,4 +285,19 @@ public void testConcatSourcesParam() {
      Assert.assertEquals(expected, computed.getValue());
    }
  }

+  @Test
+  public void testUserNameOkAfterResettingPattern() {
+    UserParam.Domain oldDomain = UserParam.getUserPatternDomain();
+
+    String newPattern = "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$";
+    UserParam.setUserPattern(newPattern);
+
+    UserParam userParam = new UserParam("1x");
+    assertNotNull(userParam.getValue());
+    userParam = new UserParam("123");
+    assertNotNull(userParam.getValue());
+
+    UserParam.setUserPatternDomain(oldDomain);
+  }
}