Try to get Windows working
This commit is contained in:
parent
2347e3c373
commit
ee546ff655
|
@ -18,10 +18,14 @@
|
|||
*/
|
||||
package org.elasticsearch.plugin.hadoop.hdfs;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedAction;
|
||||
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.repositories.RepositoriesModule;
|
||||
|
@ -33,23 +37,40 @@ public class HdfsPlugin extends Plugin {
|
|||
|
||||
// initialize some problematic classes with elevated privileges
|
||||
static {
|
||||
SecurityManager sm = System.getSecurityManager();
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
AccessController.doPrivileged(new PrivilegedAction<Void>() {
|
||||
@Override
|
||||
public Void run() {
|
||||
try {
|
||||
Class.forName("org.apache.hadoop.security.UserGroupInformation");
|
||||
Class.forName("org.apache.hadoop.util.StringUtils");
|
||||
Class.forName("org.apache.hadoop.util.ShutdownHookManager");
|
||||
} catch (ClassNotFoundException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
});
|
||||
SecurityManager sm = System.getSecurityManager();
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
AccessController.doPrivileged(new PrivilegedAction<Void>() {
|
||||
@Override
|
||||
public Void run() {
|
||||
return evilHadoopInit();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@SuppressForbidden(reason = "Needs a security hack for hadoop on windows, until HADOOP-XXXX is fixed")
|
||||
private static Void evilHadoopInit() {
|
||||
String oldValue = null;
|
||||
try {
|
||||
// hack: on Windows, Shell's cinit has a similar problem that on unix,
|
||||
// but here we can workaround it for now by setting hadoop home
|
||||
// TODO: remove THIS when hadoop is fixed
|
||||
Path hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
|
||||
oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
|
||||
Class.forName("org.apache.hadoop.security.UserGroupInformation");
|
||||
Class.forName("org.apache.hadoop.util.StringUtils");
|
||||
Class.forName("org.apache.hadoop.util.ShutdownHookManager");
|
||||
} catch (ClassNotFoundException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
} finally {
|
||||
if (oldValue == null) {
|
||||
System.clearProperty("hadoop.home.dir");
|
||||
} else {
|
||||
System.setProperty("hadoop.home.dir", oldValue);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
Loading…
Reference in New Issue