better containment of hadoop for actual blobstore operations
commit 7abd051734
parent 010d1a89c5
@@ -29,10 +29,13 @@ import org.elasticsearch.common.blobstore.BlobPath;
 import org.elasticsearch.common.blobstore.BlobStore;
 
 import java.io.IOException;
+import java.lang.reflect.ReflectPermission;
 import java.security.AccessController;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
 
+import javax.security.auth.AuthPermission;
+
 final class HdfsBlobStore implements BlobStore {
 
     private final Path root;
@@ -114,6 +117,9 @@ final class HdfsBlobStore implements BlobStore {
     /**
      * Executes the provided operation against this store
      */
+    // we can do FS ops with only two elevated permissions:
+    // 1) hadoop dynamic proxy is messy with access rules
+    // 2) allow hadoop to add credentials to our Subject
     <V> V execute(Operation<V> operation) throws IOException {
         SecurityManager sm = System.getSecurityManager();
         if (sm != null) {
@@ -129,7 +135,8 @@ final class HdfsBlobStore implements BlobStore {
                 public V run() throws IOException {
                     return operation.run(fileContext);
                 }
-            });
+            }, null, new ReflectPermission("suppressAccessChecks"),
+                     new AuthPermission("modifyPrivateCredentials"));
         } catch (PrivilegedActionException pae) {
             throw (IOException) pae.getException();
         }
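
The two extra arguments passed to AccessController.doPrivileged above use the JDK's limited-privilege overload: with a null AccessControlContext and an explicit list of Permission objects, the elevation applies only to the listed permissions, while every other permission check still walks the full call stack. A minimal, self-contained sketch of the pattern (not part of this commit; the class and method names are illustrative only):

import java.io.IOException;
import java.lang.reflect.ReflectPermission;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

import javax.security.auth.AuthPermission;

final class LimitedPrivilegeSketch {

    // Runs the action with only the two permissions the HDFS blobstore needs:
    // suppressed reflection access checks (for Hadoop's dynamic proxies) and the
    // right to attach private credentials to the current Subject.
    static <V> V runRestricted(PrivilegedExceptionAction<V> action) throws IOException {
        try {
            return AccessController.doPrivileged(action, null,
                new ReflectPermission("suppressAccessChecks"),
                new AuthPermission("modifyPrivateCredentials"));
        } catch (PrivilegedActionException pae) {
            Exception cause = pae.getException();
            if (cause instanceof IOException) {
                throw (IOException) cause;
            }
            throw new RuntimeException(cause);
        }
    }
}

In the store itself, every filesystem call is funneled through execute(Operation), so all Hadoop file operations run inside this restricted block.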
@@ -63,6 +63,8 @@ public final class HdfsPlugin extends Plugin {
             Class.forName("org.apache.hadoop.util.StringUtils");
             Class.forName("org.apache.hadoop.util.ShutdownHookManager");
             Class.forName("org.apache.hadoop.conf.Configuration");
+            Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants");
+            Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck");
         } catch (ClassNotFoundException | IOException e) {
             throw new RuntimeException(e);
         } finally {
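
The two new Class.forName calls force the static initializers of HdfsConstants and PipelineAck to run during plugin initialization, while the plugin's full policy grants are in effect, instead of later inside the restricted doPrivileged block around blobstore operations. A minimal, hypothetical sketch of this eager-initialization pattern; the class and method names here are made up for illustration, only the loaded Hadoop class names come from the commit:

import java.security.AccessController;
import java.security.PrivilegedAction;

final class EagerHadoopInitSketch {

    // Illustrative only: trigger clinit of classes known to need extra permissions
    // (e.g. RuntimePermission("getClassLoader")) while we still run privileged.
    static void eagerInit() {
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                try {
                    Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants");
                    Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck");
                } catch (ClassNotFoundException e) {
                    throw new RuntimeException(e);
                }
                return null;
            }
        });
    }
}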
@@ -18,7 +18,7 @@
  */
 
 grant {
-  // Hadoop UserGroupInformation clinit
+  // Hadoop UserGroupInformation, HdfsConstants, PipelineAck clinit
   permission java.lang.RuntimePermission "getClassLoader";
 
   // UserGroupInformation (UGI) Metrics clinit
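
The updated comment only documents why the existing getClassLoader grant is needed: the clinit of the classes now loaded eagerly above, like that of UserGroupInformation, depends on it. A hypothetical illustration of the kind of call that trips this check under a SecurityManager; Hadoop's actual initialization paths are not shown in this commit:

final class ClassLoaderLookupSketch {

    // Looking up the context class loader from code whose own loader is not an
    // ancestor of it requires RuntimePermission("getClassLoader") when a
    // SecurityManager is installed, hence the grant in the plugin policy.
    static ClassLoader contextLoader() {
        return Thread.currentThread().getContextClassLoader();
    }
}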