Remove multi-arg ImmutableMap#of variants

parent e75f1137f2
commit ab7fa7fe9e
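The pattern applied throughout the diff below is mechanical: every multi-argument com.google.common.collect.ImmutableMap#of call is replaced with a JDK equivalent, namely Collections.singletonMap for a single entry, Collections.emptyMap for none, and a plain HashMap (wrapped in Collections.unmodifiableMap where the caller should not mutate it) for several entries. The multi-argument ImmutableMap#of signatures are also added to the forbidden-APIs signatures file so the calls cannot creep back in. A minimal sketch of the substitution, with illustrative names rather than code taken verbatim from the diff:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class ImmutableMapReplacementSketch {
        // Was: ImmutableMap.of("rack_id", "1")
        Map<String, String> oneEntry = Collections.singletonMap("rack_id", "1");

        // Was: ImmutableMap.<String, String>of()
        Map<String, String> noEntries = Collections.emptyMap();

        // Was: ImmutableMap.of("tag", "A", "group", "B")
        Map<String, String> severalEntries = buildSeveralEntries();

        private static Map<String, String> buildSeveralEntries() {
            Map<String, String> attributes = new HashMap<>();
            attributes.put("tag", "A");
            attributes.put("group", "B");
            // Only wrapped where the receiver must not modify the map.
            return Collections.unmodifiableMap(attributes);
        }
    }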
@@ -19,8 +19,6 @@

package org.elasticsearch.action.admin.indices.mapping.get;

-import com.google.common.collect.ImmutableMap;
-
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData;
import org.elasticsearch.action.support.ActionFilters;
@@ -56,6 +54,7 @@ import java.util.Iterator;
import java.util.Map;
import java.util.stream.Collectors;

+import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.util.CollectionUtils.newLinkedList;

/**
@@ -110,13 +109,13 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
MapBuilder<String, Map<String, FieldMappingMetaData>> typeMappings = new MapBuilder<>();
for (String type : typeIntersection) {
DocumentMapper documentMapper = indexService.mapperService().documentMapper(type);
-ImmutableMap<String, FieldMappingMetaData> fieldMapping = findFieldMappingsByType(documentMapper, request);
+Map<String, FieldMappingMetaData> fieldMapping = findFieldMappingsByType(documentMapper, request);
if (!fieldMapping.isEmpty()) {
typeMappings.put(type, fieldMapping);
}
}

-return new GetFieldMappingsResponse(ImmutableMap.of(shardId.getIndex(), typeMappings.immutableMap()));
+return new GetFieldMappingsResponse(singletonMap(shardId.getIndex(), typeMappings.immutableMap()));
}

@Override
@@ -166,7 +165,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc
}
};

-private ImmutableMap<String, FieldMappingMetaData> findFieldMappingsByType(DocumentMapper documentMapper, GetFieldMappingsIndexRequest request) {
+private Map<String, FieldMappingMetaData> findFieldMappingsByType(DocumentMapper documentMapper, GetFieldMappingsIndexRequest request) {
MapBuilder<String, FieldMappingMetaData> fieldMappings = new MapBuilder<>();
final DocumentFieldMappers allFieldMappers = documentMapper.mappers();
for (String field : request.fields()) {

@@ -18,7 +18,6 @@
*/
package org.elasticsearch.search.fetch.explain;

-import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
@@ -30,6 +29,8 @@ import org.elasticsearch.search.rescore.RescoreSearchContext;
import java.io.IOException;
import java.util.Map;

+import static java.util.Collections.singletonMap;

/**
 *
 */
@@ -37,7 +38,7 @@ public class ExplainFetchSubPhase implements FetchSubPhase {

@Override
public Map<String, ? extends SearchParseElement> parseElements() {
-return ImmutableMap.of("explain", new ExplainParseElement());
+return singletonMap("explain", new ExplainParseElement());
}

@Override
@@ -59,7 +60,7 @@ public class ExplainFetchSubPhase implements FetchSubPhase {
try {
final int topLevelDocId = hitContext.hit().docId();
Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);

for (RescoreSearchContext rescore : context.rescore()) {
explanation = rescore.rescorer().explain(topLevelDocId, context, rescore, explanation);
}

@@ -19,11 +19,9 @@

package org.elasticsearch.search.fetch.innerhits;

-import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.search.SearchParseElement;
@@ -43,32 +41,24 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

+import static java.util.Collections.singletonMap;

/**
 */
public class InnerHitsFetchSubPhase implements FetchSubPhase {

-private final SortParseElement sortParseElement;
-private final FetchSourceParseElement sourceParseElement;
-private final HighlighterParseElement highlighterParseElement;
-private final FieldDataFieldsParseElement fieldDataFieldsParseElement;
-private final ScriptFieldsParseElement scriptFieldsParseElement;
+private final Map<String, ? extends SearchParseElement> parseElements;

private FetchPhase fetchPhase;

@Inject
public InnerHitsFetchSubPhase(SortParseElement sortParseElement, FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement, ScriptFieldsParseElement scriptFieldsParseElement) {
-this.sortParseElement = sortParseElement;
-this.sourceParseElement = sourceParseElement;
-this.highlighterParseElement = highlighterParseElement;
-this.fieldDataFieldsParseElement = fieldDataFieldsParseElement;
-this.scriptFieldsParseElement = scriptFieldsParseElement;
+parseElements = singletonMap("inner_hits", new InnerHitsParseElement(sortParseElement, sourceParseElement, highlighterParseElement,
+fieldDataFieldsParseElement, scriptFieldsParseElement));
}

@Override
public Map<String, ? extends SearchParseElement> parseElements() {
-return ImmutableMap.of("inner_hits", new InnerHitsParseElement(
-sortParseElement, sourceParseElement, highlighterParseElement, fieldDataFieldsParseElement, scriptFieldsParseElement
-));
+return parseElements;
}

@Override

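InnerHitsFetchSubPhase above takes a slightly different shape from the simple one-line substitutions: its parse element needs the constructor-injected parse elements, so the singleton map is built once in the constructor and cached in a final field instead of being rebuilt on every parseElements() call. A reduced sketch of that shape, using placeholder types rather than the real Elasticsearch interfaces:

    import java.util.Collections;
    import java.util.Map;

    class ConstructorCachedParseElementsSketch {
        interface ParseElement {}

        private final Map<String, ? extends ParseElement> parseElements;

        ConstructorCachedParseElementsSketch(ParseElement innerHitsElement) {
            // Built once per instance; the element depends on injected collaborators,
            // so a static constant is not an option here.
            parseElements = Collections.singletonMap("inner_hits", innerHitsElement);
        }

        public Map<String, ? extends ParseElement> parseElements() {
            return parseElements;
        }
    }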
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.search.fetch.version;

-import com.google.common.collect.ImmutableMap;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
@@ -33,14 +32,17 @@ import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Map;

+import static java.util.Collections.singletonMap;

/**
 *
 */
public class VersionFetchSubPhase implements FetchSubPhase {
+private static final Map<String, ? extends SearchParseElement> PARSE_ELEMENTS = singletonMap("version", new VersionParseElement());

@Override
public Map<String, ? extends SearchParseElement> parseElements() {
-return ImmutableMap.of("version", new VersionParseElement());
+return PARSE_ELEMENTS;
}

@Override

@@ -19,7 +19,6 @@

package org.elasticsearch.search.highlight;

-import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
@@ -40,12 +39,15 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;

+import static java.util.Collections.singletonMap;

/**
 *
 */
public class HighlightPhase extends AbstractComponent implements FetchSubPhase {

private static final List<String> STANDARD_HIGHLIGHTERS_BY_PRECEDENCE = Arrays.asList("fvh", "postings", "plain");
+private static final Map<String, ? extends SearchParseElement> PARSE_ELEMENTS = singletonMap("highlight",
+new HighlighterParseElement());

private final Highlighters highlighters;

@@ -57,7 +59,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {

@Override
public Map<String, ? extends SearchParseElement> parseElements() {
-return ImmutableMap.of("highlight", new HighlighterParseElement());
+return PARSE_ELEMENTS;
}

@Override

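VersionFetchSubPhase and HighlightPhase above go one step further: their singleton maps do not depend on any instance state, so the map is hoisted into a private static final constant and parseElements() stops allocating altogether. Collections.singletonMap is immutable, which makes sharing one instance across calls safe. A stripped-down sketch of that variant, again with placeholder types:

    import java.util.Collections;
    import java.util.Map;

    class StaticParseElementsSketch {
        interface ParseElement {}
        static class VersionParseElementStub implements ParseElement {}

        private static final Map<String, ? extends ParseElement> PARSE_ELEMENTS =
                Collections.singletonMap("version", new VersionParseElementStub());

        // Returns the shared, immutable map instead of building a new one per call.
        public Map<String, ? extends ParseElement> parseElements() {
            return PARSE_ELEMENTS;
        }
    }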
@@ -18,7 +18,6 @@
*/
package org.elasticsearch.search.lookup;

-import com.google.common.collect.ImmutableMap;
import org.apache.lucene.index.LeafReader;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
@@ -33,6 +32,8 @@ import java.util.HashMap;
import java.util.Map;
import java.util.Set;

+import static java.util.Collections.singletonMap;

/**
 *
 */
@@ -148,7 +149,7 @@ public class LeafFieldsLookup implements Map {
try {
reader.document(docId, fieldVisitor);
fieldVisitor.postProcess(data.fieldType());
-data.fields(ImmutableMap.of(name, fieldVisitor.fields().get(data.fieldType().names().indexName())));
+data.fields(singletonMap(name, fieldVisitor.fields().get(data.fieldType().names().indexName())));
} catch (IOException e) {
throw new ElasticsearchParseException("failed to load field [{}]", e, name);
}

@@ -18,7 +18,6 @@
*/
package org.elasticsearch.benchmark.cluster;

-import com.google.common.collect.ImmutableMap;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -35,6 +34,7 @@ import org.elasticsearch.test.ESAllocationTestCase;

import java.util.Random;

+import static java.util.Collections.singletonMap;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;

public class ClusterAllocationRerouteBenchmark {
@@ -64,7 +64,7 @@ public class ClusterAllocationRerouteBenchmark {
RoutingTable routingTable = rb.build();
DiscoveryNodes.Builder nb = DiscoveryNodes.builder();
for (int i = 1; i <= numberOfNodes; i++) {
-nb.put(ESAllocationTestCase.newNode("node" + i, numberOfTags == 0 ? ImmutableMap.<String, String>of() : ImmutableMap.of("tag", "tag_" + (i % numberOfTags))));
+nb.put(ESAllocationTestCase.newNode("node" + i, singletonMap("tag", "tag_" + (i % numberOfTags))));
}
ClusterState initialClusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).nodes(nb).build();

@@ -19,8 +19,6 @@

package org.elasticsearch.cluster.node;

-import com.google.common.collect.ImmutableMap;
-
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
@@ -34,7 +32,9 @@ import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
@@ -112,16 +112,27 @@ public class DiscoveryNodeFiltersTests extends ESTestCase {
.build());
DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings);

+Map<String, String> attributes = new HashMap<>();
+attributes.put("tag", "A");
+attributes.put("group", "B");
DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE,
-ImmutableMap.of("tag", "A", "group", "B"), Version.CURRENT);
+attributes, Version.CURRENT);
assertThat(filters.match(node), equalTo(true));

+attributes = new HashMap<>();
+attributes.put("tag", "A");
+attributes.put("group", "B");
+attributes.put("name", "X");
node = new DiscoveryNode("name2", "id2", DummyTransportAddress.INSTANCE,
-ImmutableMap.of("tag", "A", "group", "B", "name", "X"), Version.CURRENT);
+attributes, Version.CURRENT);
assertThat(filters.match(node), equalTo(true));

+attributes = new HashMap<>();
+attributes.put("tag", "A");
+attributes.put("group", "F");
+attributes.put("name", "X");
node = new DiscoveryNode("name3", "id3", DummyTransportAddress.INSTANCE,
-ImmutableMap.of("tag", "A", "group", "F", "name", "X"), Version.CURRENT);
+attributes, Version.CURRENT);
assertThat(filters.match(node), equalTo(false));

node = new DiscoveryNode("name4", "id4", DummyTransportAddress.INSTANCE, emptyMap(), Version.CURRENT);

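Collections.singletonMap only covers the single-entry case, so the test above that previously passed a multi-key ImmutableMap.of now builds a small HashMap and hands it to DiscoveryNode directly; these tests do not rely on the map being immutable. A hedged sketch of that pattern, with a plain helper method standing in for the DiscoveryNode constructor call:

    import java.util.HashMap;
    import java.util.Map;

    class MultiEntryAttributesSketch {
        // Replaces the now-banned ImmutableMap.of("tag", "A", "group", "B", "name", "X").
        static Map<String, String> nodeAttributes() {
            Map<String, String> attributes = new HashMap<>();
            attributes.put("tag", "A");
            attributes.put("group", "B");
            attributes.put("name", "X");
            return attributes;
        }
    }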
@@ -19,7 +19,6 @@

package org.elasticsearch.cluster.routing.allocation;

-import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -43,7 +42,10 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESAllocationTestCase;
import org.junit.Test;

-import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
+import static java.util.Collections.singletonMap;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.equalTo;

@@ -118,7 +120,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase {
.put(newNode("node1"))
.put(newNode("node2"))
.put(newNode("node3"))
-.put(newNode("node4", ImmutableMap.of("data", Boolean.FALSE.toString())))
+.put(newNode("node4", singletonMap("data", Boolean.FALSE.toString())))
).build();
RoutingAllocation.Result rerouteResult = allocation.reroute(clusterState);
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();

@@ -19,7 +19,6 @@

package org.elasticsearch.cluster.routing.allocation;

-import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -38,9 +37,15 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESAllocationTestCase;
import org.junit.Test;

-import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
+import static java.util.Collections.singletonMap;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.sameInstance;

/**
 */
@@ -70,8 +75,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "1")))
+.put(newNode("node1", singletonMap("rack_id", "1")))
+.put(newNode("node2", singletonMap("rack_id", "1")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -89,7 +94,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node with a new rack and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node3", ImmutableMap.of("rack_id", "2")))
+.put(newNode("node3", singletonMap("rack_id", "2")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -109,7 +114,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add another node with a new rack, make sure nothing moves");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node4", ImmutableMap.of("rack_id", "3")))
+.put(newNode("node4", singletonMap("rack_id", "3")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
assertThat(routingTable, sameInstance(clusterState.routingTable()));
@@ -139,9 +144,9 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node3", ImmutableMap.of("rack_id", "1")))
+.put(newNode("node1", singletonMap("rack_id", "1")))
+.put(newNode("node2", singletonMap("rack_id", "1")))
+.put(newNode("node3", singletonMap("rack_id", "1")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -159,7 +164,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node with a new rack and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node4", ImmutableMap.of("rack_id", "2")))
+.put(newNode("node4", singletonMap("rack_id", "2")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -179,7 +184,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add another node with a new rack, make sure nothing moves");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node5", ImmutableMap.of("rack_id", "3")))
+.put(newNode("node5", singletonMap("rack_id", "3")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
assertThat(routingTable, sameInstance(clusterState.routingTable()));
@@ -214,8 +219,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "1")))
+.put(newNode("node1", singletonMap("rack_id", "1")))
+.put(newNode("node2", singletonMap("rack_id", "1")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -247,7 +252,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node with a new rack and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node3", ImmutableMap.of("rack_id", "2")))
+.put(newNode("node3", singletonMap("rack_id", "2")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -272,7 +277,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add another node with a new rack, some more relocation should happen");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node4", ImmutableMap.of("rack_id", "3")))
+.put(newNode("node4", singletonMap("rack_id", "3")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -314,8 +319,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "1")))
+.put(newNode("node1", singletonMap("rack_id", "1")))
+.put(newNode("node2", singletonMap("rack_id", "1")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -333,7 +338,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node with a new rack and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node3", ImmutableMap.of("rack_id", "2")))
+.put(newNode("node3", singletonMap("rack_id", "2")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -362,7 +367,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add another node with a new rack, some more relocation should happen");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node4", ImmutableMap.of("rack_id", "3")))
+.put(newNode("node4", singletonMap("rack_id", "3")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -406,8 +411,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "1")))
+.put(newNode("node1", singletonMap("rack_id", "1")))
+.put(newNode("node2", singletonMap("rack_id", "1")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -425,7 +430,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node with a new rack and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node3", ImmutableMap.of("rack_id", "2")))
+.put(newNode("node3", singletonMap("rack_id", "2")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -445,7 +450,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add another node with a new rack, we will have another relocation");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node4", ImmutableMap.of("rack_id", "3")))
+.put(newNode("node4", singletonMap("rack_id", "3")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -485,10 +490,10 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node3", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node4", ImmutableMap.of("rack_id", "1")))
+.put(newNode("node1", singletonMap("rack_id", "1")))
+.put(newNode("node2", singletonMap("rack_id", "1")))
+.put(newNode("node3", singletonMap("rack_id", "1")))
+.put(newNode("node4", singletonMap("rack_id", "1")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -506,7 +511,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node with a new rack and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node5", ImmutableMap.of("rack_id", "2")))
+.put(newNode("node5", singletonMap("rack_id", "2")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -526,7 +531,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add another node with a new rack, we will have another relocation");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node6", ImmutableMap.of("rack_id", "3")))
+.put(newNode("node6", singletonMap("rack_id", "3")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -567,8 +572,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "1")))
+.put(newNode("node1", singletonMap("rack_id", "1")))
+.put(newNode("node2", singletonMap("rack_id", "1")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -584,7 +589,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node with a new rack and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node3", ImmutableMap.of("rack_id", "2")))
+.put(newNode("node3", singletonMap("rack_id", "2")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -604,7 +609,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add another node with a new rack, make sure nothing moves");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node4", ImmutableMap.of("rack_id", "3")))
+.put(newNode("node4", singletonMap("rack_id", "3")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
assertThat(routingTable, sameInstance(clusterState.routingTable()));
@@ -635,9 +640,9 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node3", ImmutableMap.of("rack_id", "1")))
+.put(newNode("node1", singletonMap("rack_id", "1")))
+.put(newNode("node2", singletonMap("rack_id", "1")))
+.put(newNode("node3", singletonMap("rack_id", "1")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -653,7 +658,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node with a new rack and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node4", ImmutableMap.of("rack_id", "2")))
+.put(newNode("node4", singletonMap("rack_id", "2")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -673,7 +678,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add another node with a new rack, make sure nothing moves");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node5", ImmutableMap.of("rack_id", "3")))
+.put(newNode("node5", singletonMap("rack_id", "3")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
assertThat(routingTable, sameInstance(clusterState.routingTable()));
@@ -711,8 +716,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes on same rack and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "1")))
+.put(newNode("node1", singletonMap("rack_id", "1")))
+.put(newNode("node2", singletonMap("rack_id", "1")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -726,7 +731,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node with a new rack and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node3", ImmutableMap.of("rack_id", "2")))
+.put(newNode("node3", singletonMap("rack_id", "2")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -750,7 +755,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add another node with a new rack, some more relocation should happen");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node4", ImmutableMap.of("rack_id", "3")))
+.put(newNode("node4", singletonMap("rack_id", "3")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -791,8 +796,8 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding two nodes in different zones and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("A-0", ImmutableMap.of("zone", "a")))
-.put(newNode("B-0", ImmutableMap.of("zone", "b")))
+.put(newNode("A-0", singletonMap("zone", "a")))
+.put(newNode("B-0", singletonMap("zone", "b")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -813,7 +818,7 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> add a new node in zone 'a' and reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("A-1", ImmutableMap.of("zone", "a")))
+.put(newNode("A-1", singletonMap("zone", "a")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -853,12 +858,12 @@ public class AwarenessAllocationTests extends ESAllocationTestCase {

logger.info("--> adding 5 nodes in different zones and do rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("A-0", ImmutableMap.of("zone", "a")))
-.put(newNode("A-1", ImmutableMap.of("zone", "a")))
-.put(newNode("A-2", ImmutableMap.of("zone", "a")))
-.put(newNode("A-3", ImmutableMap.of("zone", "a")))
-.put(newNode("A-4", ImmutableMap.of("zone", "a")))
-.put(newNode("B-0", ImmutableMap.of("zone", "b")))
+.put(newNode("A-0", singletonMap("zone", "a")))
+.put(newNode("A-1", singletonMap("zone", "a")))
+.put(newNode("A-2", singletonMap("zone", "a")))
+.put(newNode("A-3", singletonMap("zone", "a")))
+.put(newNode("A-4", singletonMap("zone", "a")))
+.put(newNode("B-0", singletonMap("zone", "b")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();

@@ -19,14 +19,13 @@

package org.elasticsearch.cluster.routing.allocation;

-import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
@@ -36,6 +35,7 @@ import org.junit.Test;

import java.util.List;

+import static java.util.Collections.singletonMap;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.equalTo;
@@ -67,10 +67,10 @@ public class FilterRoutingTests extends ESAllocationTestCase {

logger.info("--> adding four nodes and performing rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("tag1", "value1")))
-.put(newNode("node2", ImmutableMap.of("tag1", "value2")))
-.put(newNode("node3", ImmutableMap.of("tag1", "value3")))
-.put(newNode("node4", ImmutableMap.of("tag1", "value4")))
+.put(newNode("node1", singletonMap("tag1", "value1")))
+.put(newNode("node2", singletonMap("tag1", "value2")))
+.put(newNode("node3", singletonMap("tag1", "value3")))
+.put(newNode("node4", singletonMap("tag1", "value4")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -116,10 +116,10 @@ public class FilterRoutingTests extends ESAllocationTestCase {

logger.info("--> adding two nodes and performing rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("tag1", "value1")))
-.put(newNode("node2", ImmutableMap.of("tag1", "value2")))
-.put(newNode("node3", ImmutableMap.of("tag1", "value3")))
-.put(newNode("node4", ImmutableMap.of("tag1", "value4")))
+.put(newNode("node1", singletonMap("tag1", "value1")))
+.put(newNode("node2", singletonMap("tag1", "value2")))
+.put(newNode("node3", singletonMap("tag1", "value3")))
+.put(newNode("node4", singletonMap("tag1", "value4")))
).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();

@@ -18,18 +18,21 @@
*/
package org.elasticsearch.cluster.routing.allocation;

-import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.test.ESAllocationTestCase;
import org.junit.Test;

-import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
+import static java.util.Collections.singletonMap;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
+import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.equalTo;

@@ -65,8 +68,8 @@ public class PreferLocalPrimariesToRelocatingPrimariesTests extends ESAllocation

logger.info("adding two nodes and performing rerouting till all are allocated");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("tag1", "value1")))
-.put(newNode("node2", ImmutableMap.of("tag1", "value2")))).build();
+.put(newNode("node1", singletonMap("tag1", "value1")))
+.put(newNode("node2", singletonMap("tag1", "value2")))).build();

routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
@@ -101,7 +104,7 @@ public class PreferLocalPrimariesToRelocatingPrimariesTests extends ESAllocation

logger.info("start node back up");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
-.put(newNode("node1", ImmutableMap.of("tag1", "value1")))).build();
+.put(newNode("node1", singletonMap("tag1", "value1")))).build();
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();

@@ -19,28 +19,43 @@

package org.elasticsearch.cluster.structure;

-import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.*;
+import org.elasticsearch.cluster.routing.GroupShardsIterator;
+import org.elasticsearch.cluster.routing.OperationRouting;
+import org.elasticsearch.cluster.routing.PlainShardIterator;
+import org.elasticsearch.cluster.routing.RotationShardShuffler;
+import org.elasticsearch.cluster.routing.RoutingTable;
+import org.elasticsearch.cluster.routing.ShardIterator;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardShuffler;
+import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
-import org.elasticsearch.cluster.routing.OperationRouting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESAllocationTestCase;
import org.junit.Test;

import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;

+import static java.util.Collections.singletonMap;
+import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.sameInstance;

public class RoutingIteratorTests extends ESAllocationTestCase {

@@ -231,9 +246,15 @@

ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();

+Map<String, String> node1Attributes = new HashMap<>();
+node1Attributes.put("rack_id", "rack_1");
+node1Attributes.put("zone", "zone1");
+Map<String, String> node2Attributes = new HashMap<>();
+node2Attributes.put("rack_id", "rack_2");
+node2Attributes.put("zone", "zone2");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("node1", ImmutableMap.of("rack_id", "rack_1", "zone", "zone1")))
-.put(newNode("node2", ImmutableMap.of("rack_id", "rack_2", "zone", "zone2")))
+.put(newNode("node1", unmodifiableMap(node1Attributes)))
+.put(newNode("node2", unmodifiableMap(node2Attributes)))
.localNodeId("node1")
).build();
routingTable = strategy.reroute(clusterState).routingTable();
@@ -281,8 +302,8 @@
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build();

clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
-.put(newNode("fred","node1", ImmutableMap.of("disk", "ebs")))
-.put(newNode("barney","node2", ImmutableMap.of("disk", "ephemeral")))
+.put(newNode("fred","node1", singletonMap("disk", "ebs")))
+.put(newNode("barney","node2", singletonMap("disk", "ephemeral")))
.localNodeId("node1")
).build();

@@ -314,7 +335,7 @@
} catch (IllegalArgumentException illegal) {
//expected exception
}

shardsIterator = clusterState.routingTable().index("test").shard(0).onlyNodeSelectorActiveInitializingShardsIt("fred",clusterState.nodes());
assertThat(shardsIterator.size(), equalTo(1));
assertThat(shardsIterator.nextOrNull().currentNodeId(),equalTo("node1"));

@@ -19,8 +19,6 @@

package org.elasticsearch.common.xcontent.support;

-import com.google.common.collect.ImmutableMap;
-
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -39,7 +37,12 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;

-import static org.hamcrest.Matchers.*;
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.singletonMap;
+import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.IsEqual.equalTo;

/**
@@ -561,7 +564,7 @@ public class XContentMapValuesTests extends ESTestCase {
assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken());
}
assertEquals(
-Arrays.asList(ImmutableMap.of("foo", "bar"), Collections.<String, Object>emptyMap()),
+Arrays.asList(singletonMap("foo", "bar"), emptyMap()),
parser.list());
}
}

@@ -19,7 +19,6 @@

package org.elasticsearch.search.fetch;

-import com.google.common.collect.ImmutableMap;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
@@ -28,10 +27,7 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchModule;
@@ -50,8 +46,8 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

+import static java.util.Collections.singletonMap;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
@@ -139,7 +135,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {

@Override
public Map<String, ? extends SearchParseElement> parseElements() {
-return ImmutableMap.of("term_vectors_fetch", new TermVectorsFetchParseElement());
+return singletonMap("term_vectors_fetch", new TermVectorsFetchParseElement());
}

@Override

@@ -19,8 +19,6 @@

package org.elasticsearch.search.query;

-import com.google.common.collect.ImmutableMap;
-
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
@@ -32,6 +30,7 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase;

import java.util.ArrayList;
+import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
@@ -92,11 +91,14 @@ public class ExistsMissingIT extends ESIntegTestCase {

assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", mapping));
@SuppressWarnings("unchecked")
+Map<String, Object> barObject = new HashMap<>();
+barObject.put("foo", "bar");
+barObject.put("bar", singletonMap("bar", "foo"));
final Map<String, Object>[] sources = new Map[] {
// simple property
singletonMap("foo", "bar"),
// object fields
-singletonMap("bar", ImmutableMap.of("foo", "bar", "bar", singletonMap("bar", "foo"))),
+singletonMap("bar", barObject),
singletonMap("bar", singletonMap("baz", 42)),
// empty doc
emptyMap()

@@ -130,6 +130,11 @@ com.google.common.primitives.Ints
com.google.common.collect.ImmutableSet
com.google.common.collect.ImmutableSet$Builder
com.google.common.collect.ImmutableMap#of()
+com.google.common.collect.ImmutableMap#of(java.lang.Object, java.lang.Object)
+com.google.common.collect.ImmutableMap#of(java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object)
+com.google.common.collect.ImmutableMap#of(java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object)
+com.google.common.collect.ImmutableMap#of(java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object)
+com.google.common.collect.ImmutableMap#of(java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object, java.lang.Object)

@defaultMessage Do not violate java's access system
java.lang.reflect.AccessibleObject#setAccessible(boolean)

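The entries above extend what appears to be a forbidden-APIs signatures file, which is what turns the convention into a build-time check: once the multi-argument ImmutableMap#of signatures are listed, any remaining call site fails the forbidden-APIs scan. A small illustration of what the check rejects versus what this commit uses instead (hypothetical snippet, not taken from the diff):

    import java.util.Collections;
    import java.util.Map;

    class ForbiddenApiExampleSketch {
        // ImmutableMap.of("tag", "A", "group", "B") would now be flagged by the
        // signatures listed above.
        Map<String, String> one = Collections.singletonMap("tag", "A");
        Map<String, String> none = Collections.emptyMap();
    }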
@@ -19,8 +19,6 @@

package org.elasticsearch.messy.tests;

-import com.google.common.collect.ImmutableMap;
-
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchResponse;
@@ -42,6 +40,7 @@ import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ExecutionException;

+import static java.util.Collections.singletonMap;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertExists;
@@ -58,12 +57,12 @@ import static org.hamcrest.Matchers.not;
*/
@SuppressCodecs("*") // requires custom completion format
public class TransformOnIndexMapperTests extends ESIntegTestCase {

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(GroovyPlugin.class);
}

@Test
public void searchOnTransformed() throws Exception {
setup(true);
@@ -171,7 +170,7 @@ public class TransformOnIndexMapperTests extends ESIntegTestCase {
if (getRandom().nextBoolean()) {
script = script.replace("sourceField", "'content'");
} else {
-builder.field("params", ImmutableMap.of("sourceField", "content"));
+builder.field("params", singletonMap("sourceField", "content"));
}
builder.field("script", script);
}