move to elasticsearch 0.19.0 snap and use some of its features

This commit is contained in:
Shay Banon 2012-01-31 13:04:54 +02:00
parent b22efc01e7
commit e4a23e244b
3 changed files with 4 additions and 5 deletions

View File

@@ -8,7 +8,7 @@ In order to install the plugin, simply run: `bin/plugin -install elasticsearch/e
---------------------------------------
| AWS Cloud Plugin | ElasticSearch |
---------------------------------------
-| master | 0.18 -> master |
+| master | master (0.19) |
---------------------------------------
| 1.2.0 | 0.18 -> master |
---------------------------------------

View File

@@ -31,7 +31,7 @@
</parent>
<properties>
-<elasticsearch.version>0.18.5</elasticsearch.version>
+<elasticsearch.version>0.19.0-SNAPSHOT</elasticsearch.version>
</properties>
<repositories>

View File

@@ -30,8 +30,6 @@ import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.DynamicExecutors;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.gateway.blobstore.BlobStoreGateway;
import org.elasticsearch.index.gateway.s3.S3IndexGatewayModule;
@@ -39,6 +37,7 @@ import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
/**
*
@@ -83,7 +82,7 @@ public class S3Gateway extends BlobStoreGateway {
ByteSizeValue chunkSize = componentSettings.getAsBytesSize("chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB));
int concurrentStreams = componentSettings.getAsInt("concurrent_streams", 5);
-this.concurrentStreamPool = DynamicExecutors.newScalingThreadPool(1, concurrentStreams, TimeValue.timeValueSeconds(5).millis(), EsExecutors.daemonThreadFactory(settings, "[s3_stream]"));
+this.concurrentStreamPool = EsExecutors.newScalingExecutorService(1, concurrentStreams, 5, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory(settings, "[s3_stream]"));
logger.debug("using bucket [{}], region [{}], chunk_size [{}], concurrent_streams [{}]", bucket, region, chunkSize, concurrentStreams);