diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestSecurityManager.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestSecurityManager.java
index ee2e3826600..99c62700445 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestSecurityManager.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestSecurityManager.java
@@ -41,93 +41,6 @@ public final class TestSecurityManager extends SecurityManager {
     super();
   }
 
-  // TODO: move this stuff into a Solr (non-test) SecurityManager!
-  /**
-   * {@inheritDoc}
-   * <p>This method implements hacks to workaround hadoop's garbage Shell and FileUtil code
-   */
-  @Override
-  public void checkExec(String cmd) {
-    // NOTE: it would be tempting to just allow anything from hadoop's Shell class, but then
-    // that would just give an easy vector for RCE (use hadoop Shell instead of e.g. ProcessBuilder)
-    // so we whitelist actual caller impl methods instead.
-    for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
-      // hadoop insists on shelling out to get the user's supplementary groups?
-      if ("org.apache.hadoop.security.ShellBasedUnixGroupsMapping".equals(element.getClassName()) &&
-          "getGroups".equals(element.getMethodName())) {
-        return;
-      }
-      // hadoop insists on shelling out to parse 'df' command instead of using FileStore?
-      if ("org.apache.hadoop.fs.DF".equals(element.getClassName()) &&
-          "getFilesystem".equals(element.getMethodName())) {
-        return;
-      }
-      // hadoop insists on shelling out to parse 'du' command instead of using FileStore?
-      if ("org.apache.hadoop.fs.DU".equals(element.getClassName()) &&
-          "refresh".equals(element.getMethodName())) {
-        return;
-      }
-      // hadoop insists on shelling out to parse 'ls' command instead of java nio apis?
-      if ("org.apache.hadoop.util.DiskChecker".equals(element.getClassName()) &&
-          "checkDir".equals(element.getMethodName())) {
-        return;
-      }
-      // hadoop insists on shelling out to parse 'stat' command instead of Files.getAttributes?
-      if ("org.apache.hadoop.fs.HardLink".equals(element.getClassName()) &&
-          "getLinkCount".equals(element.getMethodName())) {
-        return;
-      }
-      // hadoop "canExecute" method doesn't handle securityexception and fails completely.
-      // so, lie to it, and tell it we will happily execute, so it does not crash.
-      if ("org.apache.hadoop.fs.FileUtil".equals(element.getClassName()) &&
-          "canExecute".equals(element.getMethodName())) {
-        return;
-      }
-    }
-    super.checkExec(cmd);
-  }
-
-  /**
-   * {@inheritDoc}
-   * <p>This method implements hacks to workaround hadoop's garbage FileUtil code
-   */
-  @Override
-  public void checkWrite(String file) {
-    for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
-      // hadoop "canWrite" method doesn't handle securityexception and fails completely.
-      // so, lie to it, and tell it we will happily write, so it does not crash.
-      if ("org.apache.hadoop.fs.FileUtil".equals(element.getClassName()) &&
-          "canWrite".equals(element.getMethodName())) {
-        return;
-      }
-    }
-    super.checkWrite(file);
-  }
-
-  /**
-   * {@inheritDoc}
-   * <p>This method implements hacks to workaround hadoop's garbage FileUtil code
-   */
-  @Override
-  public void checkRead(String file) {
-    for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
-      // hadoop "createPermissionsDiagnosisString" method doesn't handle securityexception and fails completely.
-      // it insists on climbing up full directory tree!
-      // so, lie to it, and tell it we will happily read, so it does not crash.
-      if ("org.apache.hadoop.hdfs.MiniDFSCluster".equals(element.getClassName()) &&
-          "createPermissionsDiagnosisString".equals(element.getMethodName())) {
-        return;
-      }
-      // hadoop "canRead" method doesn't handle securityexception and fails completely.
-      // so, lie to it, and tell it we will happily read, so it does not crash.
-      if ("org.apache.hadoop.fs.FileUtil".equals(element.getClassName()) &&
-          "canRead".equals(element.getMethodName())) {
-        return;
-      }
-    }
-    super.checkRead(file);
-  }
-
   /**
    * {@inheritDoc}
    * <p>This method inspects the stack trace and checks who is calling
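
Aside: every override removed above uses the same caller-whitelisting idiom, which reappears verbatim in the new class below. As a minimal, self-contained sketch of that technique (the DemoSecurityManager class and the whitelisted caller class/method are hypothetical, not part of this patch; note also that the SecurityManager API it relies on was deprecated for removal in JDK 17):

    // Sketch: permit a sensitive operation only when a specific, known caller
    // appears on the current thread's stack; otherwise defer to the default check.
    public class DemoSecurityManager extends SecurityManager {
      @Override
      public void checkExec(String cmd) {
        for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
          // hypothetical trusted caller: whitelist the exact class *and* method,
          // so arbitrary code can't route an exec through a broadly-trusted class
          if ("com.example.TrustedShellCaller".equals(element.getClassName())
              && "runDiagnostics".equals(element.getMethodName())) {
            return; // allowed: trusted frame found on the stack
          }
        }
        super.checkExec(cmd); // everyone else: the normal security policy applies
      }
    }
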
diff --git a/solr/common-build.xml b/solr/common-build.xml
index 24e7d9c1948..8b513b62e0b 100644
--- a/solr/common-build.xml
+++ b/solr/common-build.xml
@@ -152,7 +152,12 @@
-
+
+ * Use this with {@code -Djava.security.manager=org.apache.solr.util.SolrSecurityManager}.
+ */
+public final class SolrSecurityManager extends SecurityManager {
+
+  static final String JUNIT4_TEST_RUNNER_PACKAGE = "com.carrotsearch.ant.tasks.junit4.";
+  static final String ECLIPSE_TEST_RUNNER_PACKAGE = "org.eclipse.jdt.internal.junit.runner.";
+  static final String IDEA_TEST_RUNNER_PACKAGE = "com.intellij.rt.execution.junit.";
+
+  /**
+   * Creates a new SolrSecurityManager. This ctor is called on JVM startup,
+   * when {@code -Djava.security.manager=org.apache.solr.util.SolrSecurityManager}
+   * is passed to JVM.
+   */
+  public SolrSecurityManager() {
+    super();
+  }
+
+  // TODO: move this stuff into a Solr (non-test) SecurityManager!
+  /**
+   * {@inheritDoc}
+   * <p>This method implements hacks to workaround hadoop's garbage Shell and FileUtil code
+   */
+  @Override
+  public void checkExec(String cmd) {
+    // NOTE: it would be tempting to just allow anything from hadoop's Shell class, but then
+    // that would just give an easy vector for RCE (use hadoop Shell instead of e.g. ProcessBuilder)
+    // so we whitelist actual caller impl methods instead.
+    for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
+      // hadoop insists on shelling out to get the user's supplementary groups?
+      if ("org.apache.hadoop.security.ShellBasedUnixGroupsMapping".equals(element.getClassName()) &&
+          "getGroups".equals(element.getMethodName())) {
+        return;
+      }
+      // hadoop insists on shelling out to parse 'df' command instead of using FileStore?
+      if ("org.apache.hadoop.fs.DF".equals(element.getClassName()) &&
+          "getFilesystem".equals(element.getMethodName())) {
+        return;
+      }
+      // hadoop insists on shelling out to parse 'du' command instead of using FileStore?
+      if ("org.apache.hadoop.fs.DU".equals(element.getClassName()) &&
+          "refresh".equals(element.getMethodName())) {
+        return;
+      }
+      // hadoop insists on shelling out to parse 'ls' command instead of java nio apis?
+      if ("org.apache.hadoop.util.DiskChecker".equals(element.getClassName()) &&
+          "checkDir".equals(element.getMethodName())) {
+        return;
+      }
+      // hadoop insists on shelling out to parse 'stat' command instead of Files.getAttributes?
+      if ("org.apache.hadoop.fs.HardLink".equals(element.getClassName()) &&
+          "getLinkCount".equals(element.getMethodName())) {
+        return;
+      }
+      // hadoop "canExecute" method doesn't handle securityexception and fails completely.
+      // so, lie to it, and tell it we will happily execute, so it does not crash.
+      if ("org.apache.hadoop.fs.FileUtil".equals(element.getClassName()) &&
+          "canExecute".equals(element.getMethodName())) {
+        return;
+      }
+    }
+    super.checkExec(cmd);
+  }
+
+  /**
+   * {@inheritDoc}
+   * <p>This method implements hacks to workaround hadoop's garbage FileUtil code
+   */
+  @Override
+  public void checkWrite(String file) {
+    for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
+      // hadoop "canWrite" method doesn't handle securityexception and fails completely.
+      // so, lie to it, and tell it we will happily write, so it does not crash.
+      if ("org.apache.hadoop.fs.FileUtil".equals(element.getClassName()) &&
+          "canWrite".equals(element.getMethodName())) {
+        return;
+      }
+    }
+    super.checkWrite(file);
+  }
+
+  /**
+   * {@inheritDoc}
+   * <p>This method implements hacks to workaround hadoop's garbage FileUtil code
+   */
+  @Override
+  public void checkRead(String file) {
+    for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
+      // hadoop "createPermissionsDiagnosisString" method doesn't handle securityexception and fails completely.
+      // it insists on climbing up full directory tree!
+      // so, lie to it, and tell it we will happily read, so it does not crash.
+      if ("org.apache.hadoop.hdfs.MiniDFSCluster".equals(element.getClassName()) &&
+          "createPermissionsDiagnosisString".equals(element.getMethodName())) {
+        return;
+      }
+      // hadoop "canRead" method doesn't handle securityexception and fails completely.
+      // so, lie to it, and tell it we will happily read, so it does not crash.
+      if ("org.apache.hadoop.fs.FileUtil".equals(element.getClassName()) &&
+          "canRead".equals(element.getMethodName())) {
+        return;
+      }
+    }
+    super.checkRead(file);
+  }
+
+  /**
+   * {@inheritDoc}
+   * <p>This method inspects the stack trace and checks who is calling
+   * {@link System#exit(int)} and similar methods
+   * @throws SecurityException if the caller of this method is not the test runner itself.
+   */
+  @Override
+  public void checkExit(final int status) {
+    AccessController.doPrivileged(new PrivilegedAction<Void>() {
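
The checkExit implementation continues beyond this excerpt. As a hedged sketch of how a stack-inspecting exit check of this shape typically completes, assuming the runner-package constants defined above (an illustration, not the patch's literal code):

    import java.security.AccessController;
    import java.security.PrivilegedAction;

    public class ExitGuardSecurityManager extends SecurityManager {
      static final String JUNIT4_TEST_RUNNER_PACKAGE = "com.carrotsearch.ant.tasks.junit4.";

      @Override
      public void checkExit(final int status) {
        // the stack walk runs privileged so the check itself needs no permissions
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
          @Override
          public Void run() {
            for (StackTraceElement se : Thread.currentThread().getStackTrace()) {
              // a test-runner frame on the stack means this exit is the runner
              // itself shutting down normally, so it is allowed
              if (se.getClassName().startsWith(JUNIT4_TEST_RUNNER_PACKAGE)) {
                return null;
              }
            }
            throw new SecurityException("System#exit(" + status
                + ") calls are not allowed because they terminate the test runner's JVM.");
          }
        });
        super.checkExit(status); // the default security policy can still deny
      }
    }

To enable it, the JVM is started with -Djava.security.manager=org.apache.solr.util.SolrSecurityManager, which is presumably what the solr/common-build.xml hunk above wires up for the test targets.
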