minimize accessibility, remove unused threadpool

Robert Muir 2015-12-21 12:39:40 -05:00
parent f67390e0c8
commit f81b12e327
8 changed files with 18 additions and 35 deletions
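The two changes travel together: types and members in the HDFS plugin drop to the lowest access level that still works, and a ThreadPool that was wired in but never used goes away. A contrived before/after sketch of the accessibility pattern, with illustrative names rather than the plugin's own:

final class Widget {                // was: public class Widget -- now package-private, not extendable
    private final String name;      // was: protected

    Widget(String name) {           // was: public -- only same-package code can construct
        this.name = name;
    }

    String name() {                 // package-private accessor, was public
        return name;
    }
}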

View File

@@ -26,7 +26,7 @@ import java.nio.file.Paths
 esplugin {
   description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
-  classname 'org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin'
+  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
 }
 
 versions << [

View File

@@ -41,12 +41,12 @@ import java.util.EnumSet;
 import java.util.LinkedHashMap;
 import java.util.Map;
 
-public class HdfsBlobContainer extends AbstractBlobContainer {
+final class HdfsBlobContainer extends AbstractBlobContainer {
-    protected final HdfsBlobStore blobStore;
-    protected final Path path;
+    private final HdfsBlobStore blobStore;
+    private final Path path;
 
-    public HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore blobStore, Path path) {
+    HdfsBlobContainer(BlobPath blobPath, HdfsBlobStore blobStore, Path path) {
         super(blobPath);
         this.blobStore = blobStore;
         this.path = path;
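With the constructor now package-private, code outside the package can no longer instantiate HdfsBlobContainer directly and reaches containers only through the BlobStore interface. A minimal sketch, assuming the BlobStore/BlobPath API of this era:

import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;

class ContainerAccess {
    // the only route left for callers outside org.elasticsearch.repositories.hdfs
    static BlobContainer container(BlobStore store, String dir) {
        return store.blobContainer(BlobPath.cleanPath().add(dir));
    }
}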

View File

@@ -28,23 +28,19 @@ import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.IOException;
-import java.util.concurrent.Executor;
 
-public class HdfsBlobStore extends AbstractComponent implements BlobStore {
+final class HdfsBlobStore extends AbstractComponent implements BlobStore {
 
     private final FileContextFactory fcf;
     private final Path rootHdfsPath;
-    private final ThreadPool threadPool;
     private final int bufferSizeInBytes;
 
-    public HdfsBlobStore(Settings settings, FileContextFactory fcf, Path path, ThreadPool threadPool) throws IOException {
+    HdfsBlobStore(Settings settings, FileContextFactory fcf, Path path) throws IOException {
         super(settings);
         this.fcf = fcf;
         this.rootHdfsPath = path;
-        this.threadPool = threadPool;
         this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes();
@@ -68,19 +64,11 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore {
         return rootHdfsPath.toUri().toString();
     }
 
-    public FileContextFactory fileContextFactory() {
+    FileContextFactory fileContextFactory() {
         return fcf;
     }
 
-    public Path path() {
-        return rootHdfsPath;
-    }
-
-    public Executor executor() {
-        return threadPool.executor(ThreadPool.Names.SNAPSHOT);
-    }
-
-    public int bufferSizeInBytes() {
+    int bufferSizeInBytes() {
         return bufferSizeInBytes;
     }
@@ -111,7 +99,7 @@ public class HdfsBlobStore extends AbstractComponent implements BlobStore {
     }
 
     private Path translateToHdfsPath(BlobPath blobPath) {
-        Path path = path();
+        Path path = rootHdfsPath;
         for (String p : blobPath) {
             path = new Path(path, p);
         }
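The buffer_size handling survives unchanged; for reference, this is how the default resolves, a sketch built around the exact line from the hunk above:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

class BufferSize {
    // an unset "buffer_size" falls back to 100 KB, i.e. 102400 bytes
    static int resolve(Settings settings) {
        return (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes();
    }
}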

View File

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.plugin.hadoop.hdfs;
+package org.elasticsearch.repositories.hdfs;
 
 import java.io.IOException;
 import java.nio.file.Files;
@@ -24,14 +24,11 @@ import java.nio.file.Path;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
 
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.SpecialPermission;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.repositories.RepositoriesModule;
-import org.elasticsearch.repositories.hdfs.HdfsRepository;
 
 public class HdfsPlugin extends Plugin {
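The surviving RepositoriesModule import suggests the plugin still registers the repository through the usual onModule hook of this era; a hedged sketch, since the hunk elides the class body:

import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.RepositoriesModule;

public class HdfsPluginSketch extends Plugin {
    @Override
    public String name() {
        return "repository-hdfs";   // assumed plugin name
    }

    @Override
    public String description() {
        return "HDFS repository plugin";
    }

    // assumed registration hook; HdfsRepository now lives in the same package
    public void onModule(RepositoriesModule repositoriesModule) {
        repositoriesModule.registerRepository("hdfs", HdfsRepository.class, BlobStoreIndexShardRepository.class);
    }
}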

View File

@@ -49,7 +49,6 @@ import org.elasticsearch.index.snapshots.IndexShardRepository;
 import org.elasticsearch.repositories.RepositoryName;
 import org.elasticsearch.repositories.RepositorySettings;
 import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
-import org.elasticsearch.threadpool.ThreadPool;
 
 public class HdfsRepository extends BlobStoreRepository implements FileContextFactory {
@@ -59,18 +58,16 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFactory {
     private final ByteSizeValue chunkSize;
     private final boolean compress;
     private final RepositorySettings repositorySettings;
-    private final ThreadPool threadPool;
     private final String path;
     private final String uri;
 
     private FileContext fc;
     private HdfsBlobStore blobStore;
 
     @Inject
-    public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ThreadPool threadPool) throws IOException {
+    public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException {
         super(name.getName(), repositorySettings, indexShardRepository);
         this.repositorySettings = repositorySettings;
-        this.threadPool = threadPool;
 
         uri = repositorySettings.settings().get("uri", settings.get("uri"));
         path = repositorySettings.settings().get("path", settings.get("path"));
@@ -112,7 +109,7 @@ public class HdfsRepository extends BlobStoreRepository implements FileContextFactory {
             }
         });
         logger.debug("Using file-system [{}] for URI [{}], path [{}]", fc.getDefaultFileSystem(), fc.getDefaultFileSystem().getUri(), hdfsPath);
-        blobStore = new HdfsBlobStore(settings, this, hdfsPath, threadPool);
+        blobStore = new HdfsBlobStore(settings, this, hdfsPath);
     } catch (IOException e) {
         throw new RuntimeException(e);
     }
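Dropping the constructor parameter is what actually severs the dependency: with Guice-style @Inject, the injector supplies exactly the parameters a constructor declares, so a parameter that is never read can simply be deleted along with its field. A contrived sketch of the idiom, not this class's code:

import org.elasticsearch.common.inject.Inject;

class InjectedComponent {
    interface Dependency {}

    private final Dependency used;

    @Inject
    InjectedComponent(Dependency used /* , ThreadPool unused -- deleted */) {
        this.used = used;
    }
}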

View File

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.plugin.hadoop.hdfs;
+package org.elasticsearch.repositories.hdfs;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -24,8 +24,8 @@ import java.util.Collection;
 import com.carrotsearch.randomizedtesting.annotations.Name;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-import org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.repositories.hdfs.HdfsPlugin;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.test.rest.RestTestCandidate;
 import org.elasticsearch.test.rest.parser.RestTestParseException;

View File

@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.plugin.hadoop.hdfs;
+package org.elasticsearch.repositories.hdfs;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
@@ -31,6 +31,7 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.repositories.RepositoryException;
+import org.elasticsearch.repositories.hdfs.HdfsPlugin;
 import org.elasticsearch.snapshots.SnapshotState;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
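Tests only need their import updated by the package move; the wiring itself is unchanged. A minimal sketch of how an ESIntegTestCase of this era typically declares the plugin, where pluginList is assumed from the test framework:

import java.util.Collection;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.repositories.hdfs.HdfsPlugin;
import org.elasticsearch.test.ESIntegTestCase;

public class HdfsPluginWiringIT extends ESIntegTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(HdfsPlugin.class);   // assumed ESIntegTestCase helper
    }
}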

View File

@@ -17,7 +17,7 @@
  * under the License.
  */
-package org.elasticsearch.plugin.hadoop.hdfs;
+package org.elasticsearch.repositories.hdfs;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.DelegateToFileSystem;
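The DelegateToFileSystem import marks this last file as the test filesystem shim. A hypothetical sketch of that delegate pattern, assuming Hadoop 2.x's constructor contract; the class name and scheme are illustrative:

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DelegateToFileSystem;
import org.apache.hadoop.fs.RawLocalFileSystem;

// forwards AbstractFileSystem calls to a local FileSystem for tests
public class LocalDelegateFs extends DelegateToFileSystem {
    protected LocalDelegateFs(URI uri, Configuration conf) throws IOException, URISyntaxException {
        super(uri, new RawLocalFileSystem(), conf, "file", false);
    }
}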