don't throw exceptions from ctor, guice is hell

Robert Muir 2015-12-19 02:09:14 -05:00
parent f174e96a14
commit 3269beeb4d
4 changed files with 32 additions and 20 deletions
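
The point of the commit message: anything thrown from an @Inject constructor comes back through Guice wrapped in a ProvisionException against a half-built object graph, so the fallible HDFS setup is moved out of the constructor and into the doStart() lifecycle hook. A minimal, generic sketch of that pattern (ExampleRepository, the "repo.path" name, and the start() method are illustrative stand-ins, not the plugin's actual API):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import javax.inject.Inject;
    import javax.inject.Named;

    // Illustrative stand-in, not the plugin's real API: the @Inject constructor
    // only captures settings, and everything that can throw is deferred to start().
    public class ExampleRepository {

        private final String path;   // cheap: read from settings in the constructor
        private Path resolved;       // fallible: resolved later, in start()

        @Inject
        public ExampleRepository(@Named("repo.path") String path) {
            // no I/O and no validation here; an exception thrown from an @Inject
            // constructor surfaces as an opaque Guice ProvisionException
            this.path = path;
        }

        // lifecycle hook invoked once injection has completed
        public void start() throws IOException {
            if (path == null) {
                throw new IllegalArgumentException("no 'path' defined");
            }
            resolved = Paths.get(path).toRealPath();   // fallible work lives here
            if (!Files.isDirectory(resolved)) {
                throw new IOException("not a directory: " + resolved);
            }
        }
    }

In the actual change below, the same split shows up as: the constructor keeps only the settings reads, while getFileContext() and the HdfsBlobStore creation move into doStart(), with IOException rethrown as a RuntimeException.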

View File

@@ -55,38 +55,50 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFactory {

     public final static String TYPE = "hdfs";

-    private final HdfsBlobStore blobStore;
     private final BlobPath basePath;
     private final ByteSizeValue chunkSize;
     private final boolean compress;
     private final RepositorySettings repositorySettings;
+    private final ThreadPool threadPool;
+    private final String path;
     private FileContext fc;
+    private HdfsBlobStore blobStore;

     @Inject
     public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ThreadPool threadPool) throws IOException {
         super(name.getName(), repositorySettings, indexShardRepository);
         this.repositorySettings = repositorySettings;
+        this.threadPool = threadPool;

-        String path = repositorySettings.settings().get("path", settings.get("path"));
+        path = repositorySettings.settings().get("path", settings.get("path"));
+        this.basePath = BlobPath.cleanPath();
+        this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null));
+        this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false));
+    }
+
+    @Override
+    protected void doStart() {
+        // get configuration
         if (path == null) {
             throw new IllegalArgumentException("no 'path' defined for hdfs snapshot/restore");
         }
-
-        // get configuration
-        fc = getFileContext();
-        Path hdfsPath = SecurityUtils.execute(fc, new FcCallback<Path>() {
-            @Override
-            public Path doInHdfs(FileContext fc) throws IOException {
-                return fc.makeQualified(new Path(path));
-            }
-        });
-        this.basePath = BlobPath.cleanPath();
-
-        logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath);
-        blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool);
-        this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null));
-        this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false));
+        try {
+            fc = getFileContext();
+            Path hdfsPath = SecurityUtils.execute(fc, new FcCallback<Path>() {
+                @Override
+                public Path doInHdfs(FileContext fc) throws IOException {
+                    return fc.makeQualified(new Path(path));
+                }
+            });
+            logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath);
+            blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+        super.doStart();
     }

     // as the FileSystem is long-lived and might go away, make sure to check it before it's being used.
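
Because fc and blobStore are now assigned in doStart() rather than in the constructor, the repository sits in a not-yet-started state right after injection. A hypothetical accessor guard (not part of this commit) illustrates how callers could be protected against using it too early:

    // hypothetical guard, not taken from this commit: callers get a clear error
    // if the repository is used before doStart() has run
    protected HdfsBlobStore blobStore() {
        HdfsBlobStore store = blobStore;
        if (store == null) {
            throw new IllegalStateException("HDFS repository has not been started yet");
        }
        return store;
    }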

View File

@@ -10,7 +10,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:36909"
+            uri: "hdfs://localhost:40737"
             path: "foo/bar"

   # Get repository

View File

@@ -12,7 +12,7 @@ setup:
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:36909"
+            uri: "hdfs://localhost:40737"
             path: "foo/bar"

   - do:

View File

@@ -12,7 +12,7 @@ setup:
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:36909"
+            uri: "hdfs://localhost:40737"
             path: "foo/bar"

   - do: