better containment of hadoop for actual blobstore operations

Robert Muir 2015-12-22 12:07:37 -05:00
parent 010d1a89c5
commit 7abd051734
3 changed files with 12 additions and 3 deletions


@@ -29,10 +29,13 @@ import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;

import java.io.IOException;
import java.lang.reflect.ReflectPermission;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import javax.security.auth.AuthPermission;

final class HdfsBlobStore implements BlobStore {

    private final Path root;
@@ -110,10 +113,13 @@ final class HdfsBlobStore implements BlobStore {
    interface Operation<V> {
        V run(FileContext fileContext) throws IOException;
    }

    /**
     * Executes the provided operation against this store
     */
    // we can do FS ops with only two elevated permissions:
    // 1) hadoop dynamic proxy is messy with access rules
    // 2) allow hadoop to add credentials to our Subject
    <V> V execute(Operation<V> operation) throws IOException {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
@@ -129,7 +135,8 @@ final class HdfsBlobStore implements BlobStore {
                public V run() throws IOException {
                    return operation.run(fileContext);
                }
            }, null, new ReflectPermission("suppressAccessChecks"),
                    new AuthPermission("modifyPrivateCredentials"));
        } catch (PrivilegedActionException pae) {
            throw (IOException) pae.getException();
        }
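For reference, the hunk above uses the limited-privilege overload of AccessController.doPrivileged, which asserts only the listed permissions inside the action instead of everything granted to the plugin. Below is a minimal, self-contained sketch of that pattern under the same two permissions; the LimitedPrivilegeSketch class and its Operation interface are illustrative stand-ins, not the plugin's actual types (the real Operation takes a Hadoop FileContext).

import java.io.IOException;
import java.lang.reflect.ReflectPermission;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import javax.security.auth.AuthPermission;

final class LimitedPrivilegeSketch {

    // Placeholder for the store's Operation<V>; the real one receives a FileContext.
    interface Operation<V> {
        V run() throws IOException;
    }

    static <V> V execute(Operation<V> operation) throws IOException {
        try {
            // The action asserts at most these two permissions; permission
            // checks for anything else still walk the full call stack.
            return AccessController.doPrivileged(
                    (PrivilegedExceptionAction<V>) operation::run,
                    null,
                    new ReflectPermission("suppressAccessChecks"),
                    new AuthPermission("modifyPrivateCredentials"));
        } catch (PrivilegedActionException pae) {
            // Operations only declare IOException, so unwrap it directly.
            throw (IOException) pae.getException();
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println(execute(() -> "ran with limited privileges"));
    }
}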


@@ -63,6 +63,8 @@ public final class HdfsPlugin extends Plugin {
            Class.forName("org.apache.hadoop.util.StringUtils");
            Class.forName("org.apache.hadoop.util.ShutdownHookManager");
            Class.forName("org.apache.hadoop.conf.Configuration");
            Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants");
            Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck");
        } catch (ClassNotFoundException | IOException e) {
            throw new RuntimeException(e);
        } finally {
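The extra Class.forName calls exist so that the static initializers of HdfsConstants and PipelineAck run eagerly at plugin startup, where the plugin's own protection domain (and policy grants) are in effect, rather than lazily inside a blobstore operation where only the two permissions above are elevated. A hedged sketch of just that mechanism, with hypothetical class and method names that are not the plugin's actual code:

final class EagerHadoopInitSketch {

    static void initHadoopClasses() {
        try {
            // Class.forName(name) loads the class and runs its <clinit> immediately,
            // so any permission it needs is checked now, under plugin-init privileges.
            Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants");
            Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck");
        } catch (ClassNotFoundException e) {
            // Missing Hadoop classes indicate a packaging problem, not a recoverable state.
            throw new RuntimeException(e);
        }
    }
}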


@@ -18,7 +18,7 @@
 */

grant {
  // Hadoop UserGroupInformation, HdfsConstants, PipelineAck clinit
  permission java.lang.RuntimePermission "getClassLoader";

  // UserGroupInformation (UGI) Metrics clinit