diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java
index 2bda9d5857a..b00c72bad8d 100644
--- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java
+++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java
@@ -55,38 +55,50 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFa
 
     public final static String TYPE = "hdfs";
 
-    private final HdfsBlobStore blobStore;
     private final BlobPath basePath;
     private final ByteSizeValue chunkSize;
     private final boolean compress;
     private final RepositorySettings repositorySettings;
+    private final ThreadPool threadPool;
+    private final String path;
     private FileContext fc;
+    private HdfsBlobStore blobStore;
 
     @Inject
     public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ThreadPool threadPool) throws IOException {
         super(name.getName(), repositorySettings, indexShardRepository);
 
         this.repositorySettings = repositorySettings;
+        this.threadPool = threadPool;
 
-        String path = repositorySettings.settings().get("path", settings.get("path"));
+        path = repositorySettings.settings().get("path", settings.get("path"));
+
+
+        this.basePath = BlobPath.cleanPath();
+        this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null));
+        this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false));
+    }
+
+    @Override
+    protected void doStart() {
+        // get configuration
         if (path == null) {
             throw new IllegalArgumentException("no 'path' defined for hdfs snapshot/restore");
         }
-
-        // get configuration
-        fc = getFileContext();
-        Path hdfsPath = SecurityUtils.execute(fc, new FcCallback<Path>() {
-            @Override
-            public Path doInHdfs(FileContext fc) throws IOException {
-                return fc.makeQualified(new Path(path));
-            }
-        });
-        this.basePath = BlobPath.cleanPath();
-
-        logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath);
-        blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool);
-        this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("chunk_size", null));
-        this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("compress", false));
+        try {
+            fc = getFileContext();
+            Path hdfsPath = SecurityUtils.execute(fc, new FcCallback<Path>() {
+                @Override
+                public Path doInHdfs(FileContext fc) throws IOException {
+                    return fc.makeQualified(new Path(path));
+                }
+            });
+            logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath);
+            blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+        super.doStart();
     }
 
     // as the FileSystem is long-lived and might go away, make sure to check it before it's being used.
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml
index 20cbcef35fd..c01128477ac 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/20_repository.yaml
@@ -10,7 +10,7 @@
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:36909"
+            uri: "hdfs://localhost:40737"
            path: "foo/bar"
 
   # Get repository
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml
index 95fcd7d5159..dcb521ff2e3 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml
@@ -12,7 +12,7 @@ setup:
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:36909"
+            uri: "hdfs://localhost:40737"
            path: "foo/bar"
 
   - do:
diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml
index 767775e4039..09f33aff2e2 100644
--- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml
+++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/40_restore.yaml
@@ -12,7 +12,7 @@ setup:
         body:
           type: hdfs
           settings:
-            uri: "hdfs://localhost:36909"
+            uri: "hdfs://localhost:40737"
            path: "foo/bar"
 
   - do: