Revert HDFS-4983

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1547970 13f79535-47bb-0310-9956-ffa450edef68
Author: Andrew Wang
Date:   2013-12-05 00:05:10 +00:00
Parent: f5e83a0b3e
Commit: 859e425dfa

7 changed files with 6 additions and 122 deletions

View File

@@ -775,9 +775,6 @@ Release 2.3.0 - UNRELEASED
     HDFS-4997. libhdfs doesn't return correct error codes in most cases (cmccabe)
 
-    HDFS-4983. Numeric usernames do not work with WebHDFS FS.
-    (Yongjun Zhang via wang)
-
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES

View File

@@ -39,7 +39,6 @@ import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
 import org.apache.hadoop.hdfs.web.AuthFilter;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.Param;
-import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.net.NetUtils;
@@ -74,10 +73,7 @@ public class NameNodeHttpServer {
   private void initWebHdfs(Configuration conf) throws IOException {
     if (WebHdfsFileSystem.isEnabled(conf, HttpServer.LOG)) {
-      // set user pattern based on configuration file
-      UserParam.setUserPattern(conf);
-
-      // add SPNEGO authentication filter for webhdfs
+      //add SPNEGO authentication filter for webhdfs
       final String name = "SPNEGO";
       final String classname = AuthFilter.class.getName();
       final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";

View File

@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hdfs.web;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-
-/**
- * This class contains constants for configuration keys used
- * in WebHdfs.
- *
- */
-@InterfaceAudience.Private
-public class WebHdfsConfigKeys extends DFSConfigKeys {
-
-  /** User name pattern key */
-  public static final String USER_PATTERN_KEY =
-      "webhdfs.user.provider.user.pattern";
-
-  /** Default user name pattern value */
-  public static final String USER_PATTERN_DEFAULT =
-      "^[A-Za-z_][A-Za-z0-9._-]*[$]?$";
-}

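For context, the deleted class above defined the only configuration key this revert removes. A minimal sketch of how the pre-revert code wired that key into UserParam (illustrative only; it compiles against the pre-revert tree, since both WebHdfsConfigKeys and UserParam.setUserPattern(Configuration) disappear in the hunks below, and the key string and the relaxed pattern are copied from this diff):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.web.resources.UserParam;

// Illustrative only: this wiring exists in the pre-revert tree and is what
// the remaining hunks take back out.
public class UserPatternWiringSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Relaxed pattern that also accepts user names starting with a digit,
    // as used by the removed test further down.
    conf.set("webhdfs.user.provider.user.pattern",
        "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");
    // Removed by this revert; afterwards the pattern is hard-coded again.
    UserParam.setUserPattern(conf);
  }
}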
View File

@@ -157,8 +157,6 @@ public class WebHdfsFileSystem extends FileSystem
     ) throws IOException {
     super.initialize(uri, conf);
     setConf(conf);
-    /** set user pattern based on configuration file */
-    UserParam.setUserPattern(conf);
     connectionFactory = URLConnectionFactory
         .newDefaultURLConnectionFactory(conf);
     initializeTokenAspect();

View File

@@ -17,12 +17,8 @@
  */
 package org.apache.hadoop.hdfs.web.resources;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
-import static org.apache.hadoop.hdfs.web.WebHdfsConfigKeys.USER_PATTERN_KEY;
-import static org.apache.hadoop.hdfs.web.WebHdfsConfigKeys.USER_PATTERN_DEFAULT;
-import com.google.common.annotations.VisibleForTesting;
 
 import java.text.MessageFormat;
 import java.util.regex.Pattern;
@@ -33,29 +29,8 @@ public class UserParam extends StringParam {
   /** Default parameter value. */
   public static final String DEFAULT = "";
 
-  private static String userPattern = null;
-  private static Domain domain = null;
-
-  static {
-    setUserPattern(USER_PATTERN_DEFAULT);
-  }
-
-  @VisibleForTesting
-  public static String getUserPattern() {
-    return userPattern;
-  }
-
-  @VisibleForTesting
-  public static void setUserPattern(String pattern) {
-    userPattern = pattern;
-    Pattern pt = Pattern.compile(userPattern);
-    domain = new Domain(NAME, pt);
-  }
-
-  public static void setUserPattern(Configuration conf) {
-    String pattern = conf.get(USER_PATTERN_KEY, USER_PATTERN_DEFAULT);
-    setUserPattern(pattern);
-  }
+  private static final Domain DOMAIN = new Domain(NAME,
+      Pattern.compile("^[A-Za-z_][A-Za-z0-9._-]*[$]?$"));
 
   private static String validateLength(String str) {
     if (str == null) {
@@ -75,7 +50,7 @@ public class UserParam extends StringParam {
    * @param str a string representation of the parameter value.
    */
   public UserParam(final String str) {
-    super(domain, str == null || str.equals(DEFAULT)? null : validateLength(str));
+    super(DOMAIN, str == null || str.equals(DEFAULT)? null : validateLength(str));
   }
 
   /**
@@ -89,4 +64,4 @@ public class UserParam extends StringParam {
   public String getName() {
     return NAME;
   }
 }

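With the hard-coded DOMAIN restored above, WebHDFS user names must again match ^[A-Za-z_][A-Za-z0-9._-]*[$]?$, so names beginning with a digit are rejected; that is the limitation HDFS-4983 had made configurable. A standalone check of the restored pattern (plain java.util.regex, no Hadoop dependencies):

import java.util.regex.Pattern;

// Demonstrates the restored user-name pattern from the UserParam hunk above.
public class UserPatternDemo {
  public static void main(String[] args) {
    Pattern p = Pattern.compile("^[A-Za-z_][A-Za-z0-9._-]*[$]?$");
    System.out.println(p.matcher("alice").matches()); // true
    System.out.println(p.matcher("hdfs$").matches()); // true, trailing '$' is allowed
    System.out.println(p.matcher("123").matches());   // false, leading digit is rejected
  }
}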
View File

@@ -38,7 +38,6 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.TestDFSClientRetries;
 import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
-import org.apache.hadoop.hdfs.web.WebHdfsConfigKeys;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.log4j.Level;
 import org.junit.Assert;
@@ -262,34 +261,6 @@ public class TestWebHDFS {
     }
   }
 
-  @Test(timeout=300000)
-  public void testNumericalUserName() throws Exception {
-    final Configuration conf = WebHdfsTestUtil.createConf();
-    conf.set(WebHdfsConfigKeys.USER_PATTERN_KEY, "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");
-    final MiniDFSCluster cluster =
-        new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
-    try {
-      cluster.waitActive();
-      WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME)
-          .setPermission(new Path("/"),
-              new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
-      UserGroupInformation.createUserForTesting("123", new String[]{"my-group"})
-          .doAs(new PrivilegedExceptionAction<Void>() {
-            @Override
-            public Void run() throws IOException, URISyntaxException {
-              FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
-                  WebHdfsFileSystem.SCHEME);
-              Path d = new Path("/my-dir");
-              Assert.assertTrue(fs.mkdirs(d));
-              return null;
-            }
-          });
-    } finally {
-      cluster.shutdown();
-    }
-  }
-
   /**
    * WebHdfs should be enabled by default after HDFS-5532
    *

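The test deleted above created a user literally named "123" and called mkdirs through the WebHDFS file system. At the REST layer that is roughly the request sketched below (hostname and port are placeholders, not taken from this commit); with the revert applied, the user.name value "123" fails UserParam validation on the NameNode before the operation runs.

import java.net.URL;

// Rough REST-level equivalent of the mkdirs call in the removed test.
// Hostname and port are illustrative placeholders.
public class NumericUserRequestSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://namenode.example.com:50070"
        + "/webhdfs/v1/my-dir?op=MKDIRS&user.name=123");
    System.out.println(url);
  }
}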
View File

@@ -285,19 +285,4 @@ public class TestParam {
       Assert.assertEquals(expected, computed.getValue());
     }
   }
-
-  @Test
-  public void testUserNameOkAfterResettingPattern() {
-    String oldPattern = UserParam.getUserPattern();
-    String newPattern = "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$";
-
-    UserParam.setUserPattern(newPattern);
-
-    UserParam userParam = new UserParam("1x");
-    assertNotNull(userParam.getValue());
-    userParam = new UserParam("123");
-    assertNotNull(userParam.getValue());
-
-    UserParam.setUserPattern(oldPattern);
-  }
 }