Merge -r 1327626:1327627 from trunk to branch. FIXES: HDFS-3263
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1327630 13f79535-47bb-0310-9956-ffa450edef68
commit cbcb7b09a8
parent 461b330ebf
@@ -170,7 +170,7 @@ public class HttpFSServer {
     throws IOException, FileSystemAccessException {
     String hadoopUser = getEffectiveUser(user, doAs);
     FileSystemAccess fsAccess = HttpFSServerWebApp.get().get(FileSystemAccess.class);
-    Configuration conf = HttpFSServerWebApp.get().get(FileSystemAccess.class).getDefaultConfiguration();
+    Configuration conf = HttpFSServerWebApp.get().get(FileSystemAccess.class).getFileSystemConfiguration();
     return fsAccess.execute(hadoopUser, conf, executor);
   }
 
@@ -194,7 +194,7 @@ public class HttpFSServer {
   private FileSystem createFileSystem(Principal user, String doAs) throws IOException, FileSystemAccessException {
     String hadoopUser = getEffectiveUser(user, doAs);
     FileSystemAccess fsAccess = HttpFSServerWebApp.get().get(FileSystemAccess.class);
-    Configuration conf = HttpFSServerWebApp.get().get(FileSystemAccess.class).getDefaultConfiguration();
+    Configuration conf = HttpFSServerWebApp.get().get(FileSystemAccess.class).getFileSystemConfiguration();
     FileSystem fs = fsAccess.createFileSystem(hadoopUser, conf);
     FileSystemReleaseFilter.setFileSystem(fs);
     return fs;
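The two hunks above are the caller side of the API rename: getDefaultConfiguration() becomes getFileSystemConfiguration(), and the returned Configuration is now the only one the service accepts. A minimal sketch of the resulting caller contract, assuming the imports and the createFileSystem(String, Configuration) signature visible in this diff (the class and method names below are illustrative, not part of the commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.lib.service.FileSystemAccess;
import org.apache.hadoop.lib.service.FileSystemAccessException;

import java.io.IOException;

public class FileSystemAccessCallerSketch {
  // Obtain the service-created Configuration, then open a FileSystem with it.
  // A hand-built Configuration is now rejected by the service with error H04.
  static FileSystem open(FileSystemAccess fsAccess, String user)
      throws IOException, FileSystemAccessException {
    Configuration conf = fsAccess.getFileSystemConfiguration();
    return fsAccess.createFileSystem(user, conf);
  }
}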
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.http.server;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.lib.server.ServerException;
 import org.apache.hadoop.lib.service.FileSystemAccess;
 import org.apache.hadoop.lib.servlet.ServerWebApp;
@@ -29,8 +30,9 @@ import java.io.IOException;
 
 /**
  * Bootstrap class that manages the initialization and destruction of the
- * HttpFSServer server, it is a <code>javax.servlet.ServletContextListener</code>
- * implementation that is wired in HttpFSServer's WAR <code>WEB-INF/web.xml</code>.
+ * HttpFSServer server, it is a <code>javax.servlet.ServletContextListener
+ * </code> implementation that is wired in HttpFSServer's WAR
+ * <code>WEB-INF/web.xml</code>.
  * <p/>
  * It provides acces to the server context via the singleton {@link #get}.
  * <p/>
@@ -38,7 +40,8 @@ import java.io.IOException;
  * with <code>httpfs.</code>.
  */
 public class HttpFSServerWebApp extends ServerWebApp {
-  private static final Logger LOG = LoggerFactory.getLogger(HttpFSServerWebApp.class);
+  private static final Logger LOG =
+    LoggerFactory.getLogger(HttpFSServerWebApp.class);
 
   /**
    * Server name and prefix for all configuration properties.
@@ -67,8 +70,8 @@ public class HttpFSServerWebApp extends ServerWebApp {
   /**
    * Constructor used for testing purposes.
    */
-  protected HttpFSServerWebApp(String homeDir, String configDir, String logDir, String tempDir,
-                               Configuration config) {
+  protected HttpFSServerWebApp(String homeDir, String configDir, String logDir,
+                               String tempDir, Configuration config) {
     super(NAME, homeDir, configDir, logDir, tempDir, config);
   }
 
@@ -80,9 +83,11 @@ public class HttpFSServerWebApp extends ServerWebApp {
   }
 
   /**
-   * Initializes the HttpFSServer server, loads configuration and required services.
+   * Initializes the HttpFSServer server, loads configuration and required
+   * services.
    *
-   * @throws ServerException thrown if HttpFSServer server could not be initialized.
+   * @throws ServerException thrown if HttpFSServer server could not be
+   * initialized.
    */
   @Override
   public void init() throws ServerException {
@@ -93,7 +98,8 @@ public class HttpFSServerWebApp extends ServerWebApp {
     SERVER = this;
     adminGroup = getConfig().get(getPrefixedName(CONF_ADMIN_GROUP), "admin");
     LOG.info("Connects to Namenode [{}]",
-             get().get(FileSystemAccess.class).getDefaultConfiguration().get("fs.default.name"));
+             get().get(FileSystemAccess.class).getFileSystemConfiguration().
+               get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
   }
 
   /**
@@ -106,7 +112,8 @@ public class HttpFSServerWebApp extends ServerWebApp {
   }
 
   /**
-   * Returns HttpFSServer server singleton, configuration and services are accessible through it.
+   * Returns HttpFSServer server singleton, configuration and services are
+   * accessible through it.
    *
    * @return the HttpFSServer server singleton.
    */
@@ -37,6 +37,6 @@ public interface FileSystemAccess {
 
   public void releaseFileSystem(FileSystem fs) throws IOException;
 
-  public Configuration getDefaultConfiguration();
+  public Configuration getFileSystemConfiguration();
 
 }
@@ -26,12 +26,14 @@ public class FileSystemAccessException extends XException {
     H01("Service property [{0}] not defined"),
     H02("Kerberos initialization failed, {0}"),
     H03("FileSystemExecutor error, {0}"),
-    H04("JobClientExecutor error, {0}"),
+    H04("Invalid configuration, it has not be created by the FileSystemAccessService"),
     H05("[{0}] validation failed, {1}"),
     H06("Property [{0}] not defined in configuration object"),
     H07("[{0}] not healthy, {1}"),
-    H08(""),
-    H09("Invalid FileSystemAccess security mode [{0}]");
+    H08("{0}"),
+    H09("Invalid FileSystemAccess security mode [{0}]"),
+    H10("Hadoop config directory not found [{0}]"),
+    H11("Could not load Hadoop config files, {0}");
 
     private String template;
 
@@ -19,7 +19,9 @@
 package org.apache.hadoop.lib.service.hadoop;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.lib.server.BaseService;
 import org.apache.hadoop.lib.server.ServiceException;
 import org.apache.hadoop.lib.service.FileSystemAccess;
@@ -32,6 +34,7 @@ import org.apache.hadoop.util.VersionInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 import java.security.PrivilegedExceptionAction;
@@ -54,9 +57,11 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
 
   public static final String NAME_NODE_WHITELIST = "name.node.whitelist";
 
-  private static final String HADOOP_CONF_PREFIX = "conf:";
+  public static final String HADOOP_CONF_DIR = "config.dir";
 
-  private static final String NAME_NODE_PROPERTY = "fs.default.name";
+  private static final String[] HADOOP_CONF_FILES = {"core-site.xml", "hdfs-site.xml"};
+
+  private static final String FILE_SYSTEM_SERVICE_CREATED = "FileSystemAccessService.created";
 
   public FileSystemAccessService() {
     super(PREFIX);
@@ -102,26 +107,40 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
       throw new ServiceException(FileSystemAccessException.ERROR.H09, security);
     }
 
-    serviceHadoopConf = new Configuration(false);
-    for (Map.Entry entry : getServiceConfig()) {
-      String name = (String) entry.getKey();
-      if (name.startsWith(HADOOP_CONF_PREFIX)) {
-        name = name.substring(HADOOP_CONF_PREFIX.length());
-        String value = (String) entry.getValue();
-        serviceHadoopConf.set(name, value);
+    String hadoopConfDirProp = getServiceConfig().get(HADOOP_CONF_DIR, getServer().getConfigDir());
+    File hadoopConfDir = new File(hadoopConfDirProp).getAbsoluteFile();
+    if (hadoopConfDir == null) {
+      hadoopConfDir = new File(getServer().getConfigDir()).getAbsoluteFile();
     }
+    if (!hadoopConfDir.exists()) {
+      throw new ServiceException(FileSystemAccessException.ERROR.H10, hadoopConfDir);
+    }
+    try {
+      serviceHadoopConf = loadHadoopConf(hadoopConfDir);
+    } catch (IOException ex) {
+      throw new ServiceException(FileSystemAccessException.ERROR.H11, ex.toString(), ex);
     }
-    setRequiredServiceHadoopConf(serviceHadoopConf);
 
-    LOG.debug("FileSystemAccess default configuration:");
+    LOG.debug("FileSystemAccess FileSystem configuration:");
     for (Map.Entry entry : serviceHadoopConf) {
       LOG.debug("  {} = {}", entry.getKey(), entry.getValue());
     }
+    setRequiredServiceHadoopConf(serviceHadoopConf);
 
     nameNodeWhitelist = toLowerCase(getServiceConfig().getTrimmedStringCollection(NAME_NODE_WHITELIST));
   }
 
+  private Configuration loadHadoopConf(File dir) throws IOException {
+    Configuration hadoopConf = new Configuration(false);
+    for (String file : HADOOP_CONF_FILES) {
+      File f = new File(dir, file);
+      if (f.exists()) {
+        hadoopConf.addResource(new Path(f.getAbsolutePath()));
+      }
+    }
+    return hadoopConf;
+  }
 
   @Override
   public void postInit() throws ServiceException {
     super.postInit();
@@ -166,17 +185,6 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
     conf.set("fs.hdfs.impl.disable.cache", "true");
   }
 
-  protected Configuration createHadoopConf(Configuration conf) {
-    Configuration hadoopConf = new Configuration();
-    ConfigurationUtils.copy(serviceHadoopConf, hadoopConf);
-    ConfigurationUtils.copy(conf, hadoopConf);
-    return hadoopConf;
-  }
-
-  protected Configuration createNameNodeConf(Configuration conf) {
-    return createHadoopConf(conf);
-  }
-
   protected FileSystem createFileSystem(Configuration namenodeConf) throws IOException {
     return FileSystem.get(namenodeConf);
   }
@@ -202,16 +210,22 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
     Check.notEmpty(user, "user");
     Check.notNull(conf, "conf");
     Check.notNull(executor, "executor");
-    if (conf.get(NAME_NODE_PROPERTY) == null || conf.getTrimmed(NAME_NODE_PROPERTY).length() == 0) {
-      throw new FileSystemAccessException(FileSystemAccessException.ERROR.H06, NAME_NODE_PROPERTY);
+    if (!conf.getBoolean(FILE_SYSTEM_SERVICE_CREATED, false)) {
+      throw new FileSystemAccessException(FileSystemAccessException.ERROR.H04);
+    }
+    if (conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY) == null ||
+        conf.getTrimmed(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY).length() == 0) {
+      throw new FileSystemAccessException(FileSystemAccessException.ERROR.H06,
+                                          CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
     }
     try {
-      validateNamenode(new URI(conf.get(NAME_NODE_PROPERTY)).getAuthority());
+      validateNamenode(
+        new URI(conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)).
+          getAuthority());
       UserGroupInformation ugi = getUGI(user);
       return ugi.doAs(new PrivilegedExceptionAction<T>() {
         public T run() throws Exception {
-          Configuration namenodeConf = createNameNodeConf(conf);
-          FileSystem fs = createFileSystem(namenodeConf);
+          FileSystem fs = createFileSystem(conf);
           Instrumentation instrumentation = getServer().get(Instrumentation.class);
           Instrumentation.Cron cron = instrumentation.createCron();
           try {
@@ -236,13 +250,16 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
     throws IOException, FileSystemAccessException {
     Check.notEmpty(user, "user");
     Check.notNull(conf, "conf");
+    if (!conf.getBoolean(FILE_SYSTEM_SERVICE_CREATED, false)) {
+      throw new FileSystemAccessException(FileSystemAccessException.ERROR.H04);
+    }
     try {
-      validateNamenode(new URI(conf.get(NAME_NODE_PROPERTY)).getAuthority());
+      validateNamenode(
+        new URI(conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)).getAuthority());
       UserGroupInformation ugi = getUGI(user);
       return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
         public FileSystem run() throws Exception {
-          Configuration namenodeConf = createNameNodeConf(conf);
-          return createFileSystem(namenodeConf);
+          return createFileSystem(conf);
         }
       });
     } catch (IOException ex) {
@@ -267,11 +284,11 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
     closeFileSystem(fs);
   }
 
 
   @Override
-  public Configuration getDefaultConfiguration() {
-    Configuration conf = new Configuration(false);
+  public Configuration getFileSystemConfiguration() {
+    Configuration conf = new Configuration(true);
     ConfigurationUtils.copy(serviceHadoopConf, conf);
+    conf.setBoolean(FILE_SYSTEM_SERVICE_CREATED, true);
     return conf;
   }
 
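Taken together, the service now builds its Hadoop configuration from site files on disk rather than from conf:-prefixed server properties, and it stamps every Configuration it hands out so execute() and createFileSystem() can detect foreign ones. A self-contained sketch of that flow, mirroring the loadHadoopConf() and getFileSystemConfiguration() bodies above (the class name and standalone main are illustrative, not part of the commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

import java.io.File;
import java.io.IOException;
import java.util.Map;

public class HadoopConfLoadingSketch {

  static final String[] HADOOP_CONF_FILES = {"core-site.xml", "hdfs-site.xml"};
  static final String CREATED_MARKER = "FileSystemAccessService.created";

  // Mirrors loadHadoopConf(): add only the site files that actually exist.
  static Configuration loadHadoopConf(File dir) throws IOException {
    Configuration hadoopConf = new Configuration(false);
    for (String file : HADOOP_CONF_FILES) {
      File f = new File(dir, file);
      if (f.exists()) {
        hadoopConf.addResource(new Path(f.getAbsolutePath()));
      }
    }
    return hadoopConf;
  }

  // Mirrors getFileSystemConfiguration(): layer the loaded site config over
  // the Hadoop defaults and set the marker that execute() checks for.
  static Configuration fileSystemConfiguration(Configuration serviceHadoopConf) {
    Configuration conf = new Configuration(true);
    for (Map.Entry<String, String> entry : serviceHadoopConf) {
      conf.set(entry.getKey(), entry.getValue());
    }
    conf.setBoolean(CREATED_MARKER, true);
    return conf;
  }

  public static void main(String[] args) throws IOException {
    Configuration conf = fileSystemConfiguration(loadHadoopConf(new File(args[0])));
    System.out.println(conf.getBoolean(CREATED_MARKER, false)); // prints true
  }
}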
@@ -153,29 +153,6 @@
     </description>
   </property>
 
-  <!-- FileSystemAccess Namenode Configuration -->
-
-  <property>
-    <name>namenode.hostname</name>
-    <value>localhost</value>
-    <description>
-      The HDFS Namenode host the httpfs server connects to perform file
-      system operations.
-
-      This property is only used to resolve other properties within this
-      configuration file.
-    </description>
-  </property>
-
-  <property>
-    <name>httpfs.hadoop.conf:fs.default.name</name>
-    <value>hdfs://${namenode.hostname}:8020</value>
-    <description>
-      The HDFS Namenode URI the httpfs server connects to perform file
-      system operations.
-    </description>
-  </property>
-
   <!-- FileSystemAccess Namenode Security Configuration -->
 
   <property>
@@ -206,12 +183,4 @@
     </description>
   </property>
 
-  <property>
-    <name>httpfs.hadoop.conf:dfs.namenode.kerberos.principal</name>
-    <value>hdfs/${namenode.hostname}@${kerberos.realm}</value>
-    <description>
-      The HDFS Namenode Kerberos principal.
-    </description>
-  </property>
-
 </configuration>
@@ -37,13 +37,13 @@ Hadoop HDFS over HTTP ${project.version} - Server Setup
 
 * Configure HttpFS
 
-  Edit the <<<httpfs-${project.version}/conf/httpfs-site.xml>>> file and
-  set the <<<httpfs.fsAccess.conf:fs.default.name>>> property to the HDFS
-  Namenode URI. For example:
+  By default, HttpFS assumes that Hadoop configuration files
+  (<<<core-site.xml & hdfs-site.xml>>>) are in the HttpFS
+  configuration directory.
 
-+---+
-httpfs.fsAccess.conf:fs.default.name=hdfs://localhost:8021
-+---+
+  If this is not the case, add to the <<<httpfs-site.xml>>> file the
+  <<<httpfs.hadoop.config.dir>>> property set to the location
+  of the Hadoop configuration directory.
 
 * Configure Hadoop
 
@@ -53,11 +53,11 @@ httpfs.fsAccess.conf:fs.default.name=hdfs://localhost:8021
 +---+
   ...
   <property>
-    <name>fsAccess.proxyuser.#HTTPFSUSER#.hosts</name>
+    <name>hadoop.proxyuser.#HTTPFSUSER#.hosts</name>
     <value>httpfs-host.foo.com</value>
   </property>
   <property>
-    <name>fsAccess.proxyuser.#HTTPFSUSER#.groups</name>
+    <name>hadoop.proxyuser.#HTTPFSUSER#.groups</name>
     <value>*</value>
   </property>
   ...
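The rewritten setup section drops the per-property namenode settings in favor of a config directory. For reference, a hypothetical httpfs-site.xml entry pointing HttpFS at an external Hadoop configuration directory might look like this (the /etc/hadoop/conf path is only an example, not from the commit):

  <property>
    <name>httpfs.hadoop.config.dir</name>
    <value>/etc/hadoop/conf</value>
    <description>
      Directory containing the core-site.xml and hdfs-site.xml files
      HttpFS should load.
    </description>
  </property>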
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.http.client;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
@@ -70,16 +71,24 @@ public class TestHttpFSFileSystem extends HFSTestCase {
     w.write("secret");
     w.close();
 
-    String fsDefaultName = TestHdfsHelper.getHdfsConf().get("fs.default.name");
+    //HDFS configuration
+    String fsDefaultName = TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
     Configuration conf = new Configuration(false);
-    conf.set("httpfs.hadoop.conf:fs.default.name", fsDefaultName);
-    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups", HadoopUsersConfTestHelper
-      .getHadoopProxyUserGroups());
-    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper
-      .getHadoopProxyUserHosts());
+    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
+    File hdfsSite = new File(new File(homeDir, "conf"), "hdfs-site.xml");
+    OutputStream os = new FileOutputStream(hdfsSite);
+    conf.writeXml(os);
+    os.close();
+
+    //HTTPFS configuration
+    conf = new Configuration(false);
+    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
+             HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
+    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
+             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
     conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
-    File hoopSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
-    OutputStream os = new FileOutputStream(hoopSite);
+    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
+    os = new FileOutputStream(httpfsSite);
     conf.writeXml(os);
     os.close();
 
@@ -20,10 +20,12 @@ package org.apache.hadoop.fs.http.server;
 
 import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.lib.service.security.DummyGroupMapping;
+import org.apache.hadoop.lib.server.Service;
+import org.apache.hadoop.lib.server.ServiceException;
+import org.apache.hadoop.lib.service.Groups;
 import org.apache.hadoop.test.HFSTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;
@@ -40,12 +42,15 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
+import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.Writer;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.List;
 
 public class TestHttpFSServer extends HFSTestCase {
 
@@ -54,12 +59,48 @@ public class TestHttpFSServer extends HFSTestCase {
   @TestJetty
   public void server() throws Exception {
     String dir = TestDirHelper.getTestDir().getAbsolutePath();
-    Configuration hoopConf = new Configuration(false);
-    HttpFSServerWebApp server = new HttpFSServerWebApp(dir, dir, dir, dir, hoopConf);
+
+    Configuration httpfsConf = new Configuration(false);
+    HttpFSServerWebApp server = new HttpFSServerWebApp(dir, dir, dir, dir, httpfsConf);
     server.init();
     server.destroy();
   }
 
+  public static class MockGroups implements Service,Groups {
+
+    @Override
+    public void init(org.apache.hadoop.lib.server.Server server) throws ServiceException {
+    }
+
+    @Override
+    public void postInit() throws ServiceException {
+    }
+
+    @Override
+    public void destroy() {
+    }
+
+    @Override
+    public Class[] getServiceDependencies() {
+      return new Class[0];
+    }
+
+    @Override
+    public Class getInterface() {
+      return Groups.class;
+    }
+
+    @Override
+    public void serverStatusChange(org.apache.hadoop.lib.server.Server.Status oldStatus,
+                                   org.apache.hadoop.lib.server.Server.Status newStatus) throws ServiceException {
+    }
+
+    @Override
+    public List<String> getGroups(String user) throws IOException {
+      return Arrays.asList(HadoopUsersConfTestHelper.getHadoopUserGroups(user));
+    }
+
+  }
   private void createHttpFSServer() throws Exception {
     File homeDir = TestDirHelper.getTestDir();
     Assert.assertTrue(new File(homeDir, "conf").mkdir());
@@ -72,13 +113,29 @@ public class TestHttpFSServer extends HFSTestCase {
     w.write("secret");
     w.close();
 
-    String fsDefaultName = TestHdfsHelper.getHdfsConf().get("fs.default.name");
+    //HDFS configuration
+    File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
+    hadoopConfDir.mkdirs();
+    String fsDefaultName = TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
     Configuration conf = new Configuration(false);
-    conf.set("httpfs.hadoop.conf:fs.default.name", fsDefaultName);
-    conf.set("httpfs.groups." + CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, DummyGroupMapping.class.getName());
+    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
+    File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
+    OutputStream os = new FileOutputStream(hdfsSite);
+    conf.writeXml(os);
+    os.close();
+
+    //HTTPFS configuration
+    conf = new Configuration(false);
+    conf.set("httpfs.services.ext", MockGroups.class.getName());
+    conf.set("httpfs.admin.group", HadoopUsersConfTestHelper.
+      getHadoopUserGroups(HadoopUsersConfTestHelper.getHadoopUsers()[0])[0]);
+    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups",
+             HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
+    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
+             HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
     conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
-    File hoopSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
-    OutputStream os = new FileOutputStream(hoopSite);
+    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
+    os = new FileOutputStream(httpfsSite);
     conf.writeXml(os);
     os.close();
 
@@ -103,7 +160,8 @@ public class TestHttpFSServer extends HFSTestCase {
     Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
 
     url = new URL(TestJettyHelper.getJettyURL(),
-                  MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation", "root"));
+                  MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation",
+                                       HadoopUsersConfTestHelper.getHadoopUsers()[0]));
     conn = (HttpURLConnection) url.openConnection();
     Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
     BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
@@ -112,7 +170,8 @@ public class TestHttpFSServer extends HFSTestCase {
     Assert.assertTrue(line.contains("\"counters\":{"));
 
     url = new URL(TestJettyHelper.getJettyURL(),
-                  MessageFormat.format("/webhdfs/v1/foo?user.name={0}&op=instrumentation", "root"));
+                  MessageFormat.format("/webhdfs/v1/foo?user.name={0}&op=instrumentation",
+                                       HadoopUsersConfTestHelper.getHadoopUsers()[0]));
     conn = (HttpURLConnection) url.openConnection();
     Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_BAD_REQUEST);
   }
@@ -20,6 +20,7 @@ package org.apache.hadoop.lib.service.hadoop;
 
 import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.lib.server.Server;
@@ -34,13 +35,32 @@ import org.apache.hadoop.test.TestException;
 import org.apache.hadoop.test.TestHdfs;
 import org.apache.hadoop.test.TestHdfsHelper;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Before;
 import org.junit.Test;
 
+import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStream;
 import java.util.Arrays;
 
 public class TestFileSystemAccessService extends HFSTestCase {
 
+  private void createHadoopConf(Configuration hadoopConf) throws Exception {
+    String dir = TestDirHelper.getTestDir().getAbsolutePath();
+    File hdfsSite = new File(dir, "hdfs-site.xml");
+    OutputStream os = new FileOutputStream(hdfsSite);
+    hadoopConf.writeXml(os);
+    os.close();
+  }
+
+  @Before
+  public void createHadoopConf() throws Exception {
+    Configuration hadoopConf = new Configuration(false);
+    hadoopConf.set("foo", "FOO");
+    createHadoopConf(hadoopConf);
+  }
+
   @Test
   @TestDir
   public void simpleSecurity() throws Exception {
@@ -124,7 +144,7 @@ public class TestFileSystemAccessService extends HFSTestCase {
                                                           FileSystemAccessService.class.getName()));
     Configuration conf = new Configuration(false);
     conf.set("server.services", services);
-    conf.set("server.hadoop.conf:foo", "FOO");
+
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     FileSystemAccessService fsAccess = (FileSystemAccessService) server.get(FileSystemAccess.class);
@@ -132,6 +152,32 @@ public class TestFileSystemAccessService extends HFSTestCase {
     server.destroy();
   }
 
+  @Test
+  @TestDir
+  public void serviceHadoopConfCustomDir() throws Exception {
+    String dir = TestDirHelper.getTestDir().getAbsolutePath();
+    String hadoopConfDir = new File(dir, "confx").getAbsolutePath();
+    new File(hadoopConfDir).mkdirs();
+    String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(),
+                                                          FileSystemAccessService.class.getName()));
+    Configuration conf = new Configuration(false);
+    conf.set("server.services", services);
+    conf.set("server.hadoop.config.dir", hadoopConfDir);
+
+    File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
+    OutputStream os = new FileOutputStream(hdfsSite);
+    Configuration hadoopConf = new Configuration(false);
+    hadoopConf.set("foo", "BAR");
+    hadoopConf.writeXml(os);
+    os.close();
+
+    Server server = new Server("server", dir, dir, dir, dir, conf);
+    server.init();
+    FileSystemAccessService fsAccess = (FileSystemAccessService) server.get(FileSystemAccess.class);
+    Assert.assertEquals(fsAccess.serviceHadoopConf.get("foo"), "BAR");
+    server.destroy();
+  }
+
   @Test
   @TestDir
   public void inWhitelists() throws Exception {
@@ -188,12 +234,17 @@ public class TestFileSystemAccessService extends HFSTestCase {
     String dir = TestDirHelper.getTestDir().getAbsolutePath();
     String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(),
                                                           FileSystemAccessService.class.getName()));
+
+    Configuration hadoopConf = new Configuration(false);
+    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
+    createHadoopConf(hadoopConf);
+
     Configuration conf = new Configuration(false);
     conf.set("server.services", services);
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     FileSystemAccess hadoop = server.get(FileSystemAccess.class);
-    FileSystem fs = hadoop.createFileSystem("u", TestHdfsHelper.getHdfsConf());
+    FileSystem fs = hadoop.createFileSystem("u", hadoop.getFileSystemConfiguration());
     Assert.assertNotNull(fs);
     fs.mkdirs(new Path("/tmp/foo"));
     hadoop.releaseFileSystem(fs);
@@ -214,6 +265,11 @@ public class TestFileSystemAccessService extends HFSTestCase {
     String dir = TestDirHelper.getTestDir().getAbsolutePath();
     String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(),
                                                           FileSystemAccessService.class.getName()));
+
+    Configuration hadoopConf = new Configuration(false);
+    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
+    createHadoopConf(hadoopConf);
+
     Configuration conf = new Configuration(false);
     conf.set("server.services", services);
     Server server = new Server("server", dir, dir, dir, dir, conf);
@@ -222,7 +278,7 @@ public class TestFileSystemAccessService extends HFSTestCase {
 
     final FileSystem fsa[] = new FileSystem[1];
 
-    hadoop.execute("u", TestHdfsHelper.getHdfsConf(), new FileSystemAccess.FileSystemExecutor<Void>() {
+    hadoop.execute("u", hadoop.getFileSystemConfiguration(), new FileSystemAccess.FileSystemExecutor<Void>() {
       @Override
       public Void execute(FileSystem fs) throws IOException {
         fs.mkdirs(new Path("/tmp/foo"));
@@ -248,14 +304,18 @@ public class TestFileSystemAccessService extends HFSTestCase {
     String dir = TestDirHelper.getTestDir().getAbsolutePath();
     String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(),
                                                           FileSystemAccessService.class.getName()));
+    Configuration hadoopConf = new Configuration(false);
+    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
+    createHadoopConf(hadoopConf);
+
     Configuration conf = new Configuration(false);
     conf.set("server.services", services);
     Server server = new Server("server", dir, dir, dir, dir, conf);
     server.init();
     FileSystemAccess fsAccess = server.get(FileSystemAccess.class);
 
-    Configuration hdfsConf = TestHdfsHelper.getHdfsConf();
-    hdfsConf.set("fs.default.name", "");
+    Configuration hdfsConf = fsAccess.getFileSystemConfiguration();
+    hdfsConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "");
     fsAccess.execute("u", hdfsConf, new FileSystemAccess.FileSystemExecutor<Void>() {
       @Override
       public Void execute(FileSystem fs) throws IOException {
@@ -271,6 +331,11 @@ public class TestFileSystemAccessService extends HFSTestCase {
     String dir = TestDirHelper.getTestDir().getAbsolutePath();
     String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(),
                                                           FileSystemAccessService.class.getName()));
+
+    Configuration hadoopConf = new Configuration(false);
+    hadoopConf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, TestHdfsHelper.getHdfsConf().get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY));
+    createHadoopConf(hadoopConf);
+
     Configuration conf = new Configuration(false);
     conf.set("server.services", services);
     Server server = new Server("server", dir, dir, dir, dir, conf);
@@ -279,7 +344,7 @@ public class TestFileSystemAccessService extends HFSTestCase {
 
     final FileSystem fsa[] = new FileSystem[1];
     try {
-      hadoop.execute("u", TestHdfsHelper.getHdfsConf(), new FileSystemAccess.FileSystemExecutor<Void>() {
+      hadoop.execute("u", hadoop.getFileSystemConfiguration(), new FileSystemAccess.FileSystemExecutor<Void>() {
         @Override
         public Void execute(FileSystem fs) throws IOException {
           fsa[0] = fs;
@@ -145,7 +145,12 @@ public class HadoopUsersConfTestHelper {
    */
   public static String[] getHadoopUserGroups(String user) {
     if (getHadoopUsers() == DEFAULT_USERS) {
+      for (String defaultUser : DEFAULT_USERS) {
+        if (defaultUser.equals(user)) {
       return DEFAULT_USERS_GROUP;
+        }
+      }
+      return new String[0];
     } else {
       String groups = System.getProperty(HADOOP_USER_PREFIX + user);
       return (groups != null) ? groups.split(",") : new String[0];
@@ -257,6 +257,8 @@ Release 2.0.0 - UNRELEASED
     HDFS-3294. Fix code indentation in NamenodeWebHdfsMethods and
     DatanodeWebHdfsMethods. (szetszwo)
 
+    HDFS-3263. HttpFS should read HDFS config from Hadoop site.xml files (tucu)
+
   OPTIMIZATIONS
 
     HDFS-2477. Optimize computing the diff between a block report and the