add cleanups from simon

This commit is contained in:
Robert Muir 2015-12-23 18:15:33 -05:00
parent 7abd051734
commit f14a21639c
1 changed file with 8 additions and 4 deletions

View File

@ -58,6 +58,10 @@ public final class HdfsRepository extends BlobStoreRepository {
private HdfsBlobStore blobStore;
// buffer size passed to HDFS read/write methods
// TODO: why 100KB?
private static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(100, ByteSizeUnit.KB);
@Inject
public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException {
super(name.getName(), repositorySettings, indexShardRepository);
@ -70,11 +74,11 @@ public final class HdfsRepository extends BlobStoreRepository {
@Override
protected void doStart() {
String uriSetting = repositorySettings.settings().get("uri");
if (!Strings.hasText(uriSetting)) {
if (Strings.hasText(uriSetting) == false) {
throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore");
}
URI uri = URI.create(uriSetting);
if (!"hdfs".equalsIgnoreCase(uri.getScheme())) {
if ("hdfs".equalsIgnoreCase(uri.getScheme()) == false) {
throw new IllegalArgumentException(
String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", uri.getScheme(), uriSetting));
}
@ -89,7 +93,7 @@ public final class HdfsRepository extends BlobStoreRepository {
throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore");
}
int bufferSize = (int) repositorySettings.settings().getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes();
int bufferSize = repositorySettings.settings().getAsBytesSize("buffer_size", DEFAULT_BUFFER_SIZE).bytesAsInt();
try {
// initialize our filecontext