(refactor) some opportunities to use diamond operator (#25585)

* (refactor) some opportunities to use diamond operator

* Update ExceptionRetryIT.java

update typo
Authored by desmorto on 2017-08-15 19:36:42 -03:00; committed by Lee Hinman
parent 1697f1521c
commit 292dd8f992
18 changed files with 23 additions and 23 deletions
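The pattern applied in every hunk below is the same: an explicit type argument on the right-hand side of a new expression is replaced with the diamond operator (<>), available since Java 7, so the compiler infers the type argument from the declared type on the left. A minimal standalone sketch of the before/after shape, using illustrative names that are not taken from the Elasticsearch sources:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DiamondOperatorExample {
    public static void main(String[] args) {
        // Before: the type argument is repeated on both sides.
        List<String> verbose = new ArrayList<String>();

        // After: the diamond operator lets the compiler infer <String>
        // from the declared type of the variable (Java 7+).
        List<String> concise = new ArrayList<>();

        // Inference also works for nested type arguments.
        Map<String, List<Integer>> index = new HashMap<>();

        verbose.add("a");
        concise.add("b");
        index.put("c", new ArrayList<>());
        System.out.println(verbose.size() + concise.size() + index.size());
    }
}

Behavior is unchanged; the diamond form only removes the redundant repetition, so the refactoring can be applied mechanically wherever the inferred type matches the original explicit type argument.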


@@ -176,7 +176,7 @@ public class QueryDSLDocumentationTests extends ESTestCase {
public void testGeoPolygon() {
// tag::geo_polygon
-List<GeoPoint> points = new ArrayList<GeoPoint>(); // <1>
+List<GeoPoint> points = new ArrayList<>(); // <1>
points.add(new GeoPoint(40, -70));
points.add(new GeoPoint(30, -80));
points.add(new GeoPoint(20, -90));


@@ -39,7 +39,7 @@ public final class LoggingTaskListener<Response> implements TaskListener<Response>
return (TaskListener<Response>) INSTANCE;
}
-private static final LoggingTaskListener<Object> INSTANCE = new LoggingTaskListener<Object>();
+private static final LoggingTaskListener<Object> INSTANCE = new LoggingTaskListener<>();
private LoggingTaskListener() {
}


@@ -169,7 +169,7 @@ public class StrictISODateTimeFormat {
if (fields == null || fields.size() == 0) {
throw new IllegalArgumentException("The fields must not be null or empty");
}
-Set<DateTimeFieldType> workingFields = new HashSet<DateTimeFieldType>(fields);
+Set<DateTimeFieldType> workingFields = new HashSet<>(fields);
int inputSize = workingFields.size();
boolean reducedPrec = false;
DateTimeFormatterBuilder bld = new DateTimeFormatterBuilder();


@@ -127,7 +127,7 @@ public class GetIndexIT extends ESIntegTestCase {
public void testSimpleMixedFeatures() {
int numFeatures = randomIntBetween(1, Feature.values().length);
-List<Feature> features = new ArrayList<Feature>(numFeatures);
+List<Feature> features = new ArrayList<>(numFeatures);
for (int i = 0; i < numFeatures; i++) {
features.add(randomFrom(Feature.values()));
}
@@ -156,7 +156,7 @@ public class GetIndexIT extends ESIntegTestCase {
public void testEmptyMixedFeatures() {
int numFeatures = randomIntBetween(1, Feature.values().length);
-List<Feature> features = new ArrayList<Feature>(numFeatures);
+List<Feature> features = new ArrayList<>(numFeatures);
for (int i = 0; i < numFeatures; i++) {
features.add(randomFrom(Feature.values()));
}


@@ -37,7 +37,7 @@ public class KeyedLockTests extends ESTestCase {
public void testIfMapEmptyAfterLotsOfAcquireAndReleases() throws InterruptedException {
ConcurrentHashMap<String, Integer> counter = new ConcurrentHashMap<>();
ConcurrentHashMap<String, AtomicInteger> safeCounter = new ConcurrentHashMap<>();
-KeyedLock<String> connectionLock = new KeyedLock<String>(randomBoolean());
+KeyedLock<String> connectionLock = new KeyedLock<>(randomBoolean());
String[] names = new String[randomIntBetween(1, 40)];
for (int i = 0; i < names.length; i++) {
names[i] = randomRealisticUnicodeOfLengthBetween(10, 20);


@@ -128,7 +128,7 @@ public class ExceptionRetryIT extends ESIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addStoredField("_id").get();
-Set<String> uniqueIds = new HashSet();
+Set<String> uniqueIds = new HashSet<>();
long dupCounter = 0;
boolean found_duplicate_already = false;
for (int i = 0; i < searchResponse.getHits().getHits().length; i++) {
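
Note that this hunk differs slightly from the others: the original line used the raw type new HashSet() rather than an explicit new HashSet<String>(), so switching to new HashSet<>() also removes an unchecked-conversion warning in addition to tightening the code. A short sketch of that distinction, with illustrative names not taken from the Elasticsearch sources:

import java.util.HashSet;
import java.util.Set;

public class RawTypeVsDiamond {
    public static void main(String[] args) {
        // Raw type: compiles, but the assignment to Set<String> is an
        // unchecked conversion and bypasses compile-time type checking.
        @SuppressWarnings("unchecked")
        Set<String> raw = new HashSet();

        // Diamond operator: HashSet<String> is inferred, so the
        // assignment is fully type checked and warning free.
        Set<String> inferred = new HashSet<>();

        raw.add("a");
        inferred.add("b");
        System.out.println(raw.size() + inferred.size());
    }
}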


@@ -42,7 +42,7 @@ public class SyncedFlushUtil {
*/
public static ShardsSyncedFlushResult attemptSyncedFlush(InternalTestCluster cluster, ShardId shardId) {
SyncedFlushService service = cluster.getInstance(SyncedFlushService.class);
-LatchedListener<ShardsSyncedFlushResult> listener = new LatchedListener();
+LatchedListener<ShardsSyncedFlushResult> listener = new LatchedListener<>();
service.attemptSyncedFlush(shardId, listener);
try {
listener.latch.await();


@@ -44,7 +44,7 @@ public class ScriptedMetricTests extends BaseAggregationTestCase<ScriptedMetricA
factory.reduceScript(randomScript("reduceScript"));
}
if (randomBoolean()) {
-Map<String, Object> params = new HashMap<String, Object>();
+Map<String, Object> params = new HashMap<>();
params.put("foo", "bar");
factory.params(params);
}


@@ -265,7 +265,7 @@ public class DerivativeIT extends ESIntegTestCase {
Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)deriv).getProperty("_count");
Object[] propertiesSumCounts = (Object[]) ((InternalAggregation)deriv).getProperty("sum.value");
-List<Bucket> buckets = new ArrayList<Bucket>(deriv.getBuckets());
+List<Bucket> buckets = new ArrayList<>(deriv.getBuckets());
Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets
// overwritten
for (int i = 0; i < numValueBuckets; ++i) {
@@ -311,7 +311,7 @@ public class DerivativeIT extends ESIntegTestCase {
Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)deriv).getProperty("_count");
Object[] propertiesSumCounts = (Object[]) ((InternalAggregation)deriv).getProperty("stats.sum");
-List<Bucket> buckets = new ArrayList<Bucket>(deriv.getBuckets());
+List<Bucket> buckets = new ArrayList<>(deriv.getBuckets());
Long expectedSumPreviousBucket = Long.MIN_VALUE; // start value, gets
// overwritten
for (int i = 0; i < numValueBuckets; ++i) {


@@ -97,7 +97,7 @@ public class ExistsIT extends ESIntegTestCase {
// empty doc
emptyMap()
};
-List<IndexRequestBuilder> reqs = new ArrayList<IndexRequestBuilder>();
+List<IndexRequestBuilder> reqs = new ArrayList<>();
for (Map<String, Object> source : sources) {
reqs.add(client().prepareIndex("idx", "type").setSource(source));
}
@@ -106,7 +106,7 @@ public class ExistsIT extends ESIntegTestCase {
// confuse the exists/missing parser at query time
indexRandom(true, false, reqs);
-final Map<String, Integer> expected = new LinkedHashMap<String, Integer>();
+final Map<String, Integer> expected = new LinkedHashMap<>();
expected.put("foo", 1);
expected.put("f*", 1);
expected.put("bar", 2);


@@ -183,7 +183,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
public void testSingleContextFiltering() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
-LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<String, ContextMapping>(Collections.singletonMap("cat", contextMapping));
+LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
int numDocs = 10;
@@ -209,7 +209,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
public void testSingleContextBoosting() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
-LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<String, ContextMapping>(Collections.singletonMap("cat", contextMapping));
+LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
int numDocs = 10;
@@ -237,7 +237,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
public void testSingleContextMultipleContexts() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
-LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<String, ContextMapping>(Collections.singletonMap("cat", contextMapping));
+LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
int numDocs = 10;


@@ -119,7 +119,7 @@ public class DefBootstrapTests extends ESTestCase {
assertEquals(3, (int)handle.invokeExact((Object) Arrays.asList("x", "y", "z")));
assertEquals(2, (int)handle.invokeExact((Object) Arrays.asList("u", "v")));
-final HashMap<String,String> map = new HashMap<String,String>();
+final HashMap<String,String> map = new HashMap<>();
map.put("x", "y");
map.put("a", "b");
assertEquals(2, (int)handle.invokeExact((Object) map));


@@ -368,7 +368,7 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
*/
void onBulkResponse(TimeValue thisBatchStartTime, BulkResponse response) {
try {
-List<Failure> failures = new ArrayList<Failure>();
+List<Failure> failures = new ArrayList<>();
Set<String> destinationIndicesThisBatch = new HashSet<>();
for (BulkItemResponse item : response) {
if (item.isFailed()) {


@@ -121,7 +121,7 @@ public class ReindexFailureTests extends ReindexTestCase {
}
private void indexDocs(int count) throws Exception {
-List<IndexRequestBuilder> docs = new ArrayList<IndexRequestBuilder>(count);
+List<IndexRequestBuilder> docs = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
docs.add(client().prepareIndex("source", "test", Integer.toString(i)).setSource("test", "words words"));
}


@@ -31,7 +31,7 @@ import java.util.Set;
public class KuromojiPartOfSpeechFilterFactory extends AbstractTokenFilterFactory {
-private final Set<String> stopTags = new HashSet<String>();
+private final Set<String> stopTags = new HashSet<>();
public KuromojiPartOfSpeechFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);


@@ -110,7 +110,7 @@ class S3BlobStore extends AbstractComponent implements BlobStore {
//we can do at most 1K objects per delete
//We don't know the bucket name until first object listing
DeleteObjectsRequest multiObjectDeleteRequest = null;
-ArrayList<KeyVersion> keys = new ArrayList<KeyVersion>();
+ArrayList<KeyVersion> keys = new ArrayList<>();
while (true) {
ObjectListing list;
if (prevListing != null) {


@@ -452,7 +452,7 @@ public abstract class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase
//we can do at most 1K objects per delete
//We don't know the bucket name until first object listing
DeleteObjectsRequest multiObjectDeleteRequest = null;
-ArrayList<DeleteObjectsRequest.KeyVersion> keys = new ArrayList<DeleteObjectsRequest.KeyVersion>();
+ArrayList<DeleteObjectsRequest.KeyVersion> keys = new ArrayList<>();
while (true) {
ObjectListing list;
if (prevListing != null) {


@@ -53,7 +53,7 @@ public class TestAmazonS3 extends AmazonS3Wrapper {
private String randomPrefix;
-ConcurrentMap<String, AtomicLong> accessCounts = new ConcurrentHashMap<String, AtomicLong>();
+ConcurrentMap<String, AtomicLong> accessCounts = new ConcurrentHashMap<>();
private long incrementAndGet(String path) {
AtomicLong value = accessCounts.get(path);