commit 491b00c4ec
parent 472cc0af08

pom.xml
@@ -226,7 +226,14 @@
         <dependency>
             <groupId>com.carrotsearch</groupId>
             <artifactId>hppc</artifactId>
-            <version>0.6.0</version>
+            <version>0.7.1</version>
         </dependency>

+        <dependency> <!-- ES uses byte* hashes -->
+            <groupId>com.carrotsearch</groupId>
+            <artifactId>hppc</artifactId>
+            <version>0.7.1</version>
+            <classifier>esoteric</classifier>
+        </dependency>
+
         <dependency>
@@ -827,10 +827,7 @@ public class MapperQueryParser extends QueryParser {

     private void applyBoost(String field, Query q) {
         if (settings.boosts() != null) {
-            float boost = 1f;
-            if (settings.boosts().containsKey(field)) {
-                boost = settings.boosts().lget();
-            }
+            float boost = settings.boosts().getOrDefault(field, 1f);
             q.setBoost(boost);
         }
     }

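Note: hppc 0.7 removed the lget() shortcut that read the value cached by the previous containsKey() call; the primitive maps expose getOrDefault(key, defaultValue) instead, which is what the hunk above switches to. A minimal sketch of the before/after pattern, using a stand-alone example map rather than the parser's settings object:

    import com.carrotsearch.hppc.ObjectFloatHashMap;

    public class BoostLookupExample {
        public static void main(String[] args) {
            ObjectFloatHashMap<String> boosts = new ObjectFloatHashMap<>();
            boosts.put("title", 2.0f);

            // hppc 0.6.x style (no longer compiles against 0.7.x):
            //   float boost = 1f;
            //   if (boosts.containsKey(field)) {
            //       boost = boosts.lget(); // value cached by the last containsKey()
            //   }

            // hppc 0.7.x style: a single call with an explicit default
            float titleBoost = boosts.getOrDefault("title", 1f); // 2.0
            float bodyBoost = boosts.getOrDefault("body", 1f);   // falls back to 1.0
            System.out.println(titleBoost + " " + bodyBoost);
        }
    }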
@@ -19,7 +19,7 @@

 package org.apache.lucene.queryparser.classic;

-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
+import com.carrotsearch.hppc.ObjectFloatHashMap;

 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.search.FuzzyQuery;

@@ -69,7 +69,7 @@ public class QueryParserSettings {

     List<String> fields = null;
     Collection<String> queryTypes = null;
-    ObjectFloatOpenHashMap<String> boosts = null;
+    ObjectFloatHashMap<String> boosts = null;
     float tieBreaker = 0.0f;
     boolean useDisMax = true;

@@ -286,11 +286,11 @@ public class QueryParserSettings {
         this.queryTypes = queryTypes;
     }

-    public ObjectFloatOpenHashMap<String> boosts() {
+    public ObjectFloatHashMap<String> boosts() {
         return boosts;
     }

-    public void boosts(ObjectFloatOpenHashMap<String> boosts) {
+    public void boosts(ObjectFloatHashMap<String> boosts) {
         this.boosts = boosts;
     }

@@ -18,7 +18,7 @@
  */
 package org.apache.lucene.search.suggest.analyzing;

-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;

 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;

@@ -1008,7 +1008,7 @@ public long ramBytesUsed() {
         private BytesRefBuilder analyzed = new BytesRefBuilder();
         private final SurfaceFormAndPayload[] surfaceFormsAndPayload;
         private int count;
-        private ObjectIntOpenHashMap<BytesRef> seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f);
+        private ObjectIntHashMap<BytesRef> seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f);
         private int payloadSep;

         public XBuilder(int maxSurfaceFormsPerAnalyzedForm, boolean hasPayloads, int payloadSep) {

@@ -1061,9 +1061,11 @@ public long ramBytesUsed() {
                 // dups: skip the rest:
                 return;
             }

             BytesRef surfaceCopy;
-            if (count > 0 && seenSurfaceForms.containsKey(surface)) {
-                surfaceIndex = seenSurfaceForms.lget();
+            final int keySlot;
+            if (count > 0 && (keySlot = seenSurfaceForms.indexOf(surface)) >= 0) {
+                surfaceIndex = seenSurfaceForms.indexGet(keySlot);
                 SurfaceFormAndPayload surfaceFormAndPayload = surfaceFormsAndPayload[surfaceIndex];
                 if (encodedWeight >= surfaceFormAndPayload.weight) {
                     return;
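Note: where the old value is needed after a successful lookup, the 0.7.x replacement is the slot-based pair indexOf()/indexGet(): indexOf() returns a non-negative slot when the key is present (negative otherwise) and indexGet() reads the value at that slot, so the hash is computed only once. A small, self-contained sketch of the pattern (an illustrative counter map, not the suggester's code):

    import com.carrotsearch.hppc.ObjectIntHashMap;

    public class IndexLookupExample {
        public static void main(String[] args) {
            ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();
            counts.put("foo", 7);

            int slot = counts.indexOf("foo");          // one hash lookup
            if (slot >= 0) {
                int value = counts.indexGet(slot);     // read the existing value
                counts.indexReplace(slot, value + 1);  // update in place
            } else {
                counts.indexInsert(slot, "foo", 1);    // insert at the computed slot
            }
            System.out.println(counts.get("foo")); // 8
        }
    }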
@@ -19,7 +19,7 @@

 package org.elasticsearch.action.admin.cluster.stats;

-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

 import org.elasticsearch.action.admin.indices.stats.CommonStats;

@@ -57,7 +57,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
     }

     public ClusterStatsIndices(ClusterStatsNodeResponse[] nodeResponses) {
-        ObjectObjectOpenHashMap<String, ShardStats> countsPerIndex = new ObjectObjectOpenHashMap<>();
+        ObjectObjectHashMap<String, ShardStats> countsPerIndex = new ObjectObjectHashMap<>();

         this.docs = new DocsStats();
         this.store = new StoreStats();
@@ -19,7 +19,7 @@

 package org.elasticsearch.action.admin.cluster.stats;

-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectIntCursor;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;

@@ -303,10 +303,10 @@ public class ClusterStatsNodes implements ToXContent, Streamable {

         int availableProcessors;
         long availableMemory;
-        ObjectIntOpenHashMap<OsInfo.Cpu> cpus;
+        ObjectIntHashMap<OsInfo.Cpu> cpus;

         public OsStats() {
-            cpus = new ObjectIntOpenHashMap<>();
+            cpus = new ObjectIntHashMap<>();
         }

         public void addNodeInfo(NodeInfo nodeInfo) {

@@ -330,7 +330,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
             return new ByteSizeValue(availableMemory);
         }

-        public ObjectIntOpenHashMap<OsInfo.Cpu> getCpus() {
+        public ObjectIntHashMap<OsInfo.Cpu> getCpus() {
             return cpus;
         }

@@ -339,7 +339,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
             availableProcessors = in.readVInt();
             availableMemory = in.readLong();
             int size = in.readVInt();
-            cpus = new ObjectIntOpenHashMap<>(size);
+            cpus = new ObjectIntHashMap<>(size);
             for (; size > 0; size--) {
                 cpus.addTo(OsInfo.Cpu.readCpu(in), in.readVInt());
             }

@@ -496,21 +496,21 @@ public class ClusterStatsNodes implements ToXContent, Streamable {

     public static class JvmStats implements Streamable, ToXContent {

-        ObjectIntOpenHashMap<JvmVersion> versions;
+        ObjectIntHashMap<JvmVersion> versions;
         long threads;
         long maxUptime;
         long heapUsed;
         long heapMax;

         JvmStats() {
-            versions = new ObjectIntOpenHashMap<>();
+            versions = new ObjectIntHashMap<>();
             threads = 0;
             maxUptime = 0;
             heapMax = 0;
             heapUsed = 0;
         }

-        public ObjectIntOpenHashMap<JvmVersion> getVersions() {
+        public ObjectIntHashMap<JvmVersion> getVersions() {
             return versions;
         }

@@ -561,7 +561,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
         @Override
         public void readFrom(StreamInput in) throws IOException {
             int size = in.readVInt();
-            versions = new ObjectIntOpenHashMap<>(size);
+            versions = new ObjectIntHashMap<>(size);
             for (; size > 0; size--) {
                 versions.addTo(JvmVersion.readJvmVersion(in), in.readVInt());
             }
@@ -19,7 +19,7 @@

 package org.elasticsearch.action.admin.indices.mapping.put;

-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.IndicesRequest;

@@ -51,7 +51,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  */
 public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> implements IndicesRequest.Replaceable {

-    private static ObjectOpenHashSet<String> RESERVED_FIELDS = ObjectOpenHashSet.from(
+    private static ObjectHashSet<String> RESERVED_FIELDS = ObjectHashSet.from(
             "_uid", "_id", "_type", "_source", "_all", "_analyzer", "_parent", "_routing", "_index",
             "_size", "_timestamp", "_ttl"
     );
@@ -19,7 +19,7 @@

 package org.elasticsearch.action.termvectors;

-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import com.carrotsearch.hppc.cursors.ObjectLongCursor;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.PostingsEnum;

@@ -113,7 +113,7 @@ import static org.apache.lucene.util.ArrayUtil.grow;

 public final class TermVectorsFields extends Fields {

-    private final ObjectLongOpenHashMap<String> fieldMap;
+    private final ObjectLongHashMap<String> fieldMap;
     private final BytesReference termVectors;
     final boolean hasTermStatistic;
     final boolean hasFieldStatistic;

@@ -126,7 +126,7 @@ public final class TermVectorsFields extends Fields {
      */
     public TermVectorsFields(BytesReference headerRef, BytesReference termVectors) throws IOException {
         BytesStreamInput header = new BytesStreamInput(headerRef);
-        fieldMap = new ObjectLongOpenHashMap<>();
+        fieldMap = new ObjectLongHashMap<>();

         // here we read the header to fill the field offset map
         String headerString = header.readString();

@@ -170,10 +170,11 @@ public final class TermVectorsFields extends Fields {
     public Terms terms(String field) throws IOException {
         // first, find where in the termVectors bytes the actual term vector for
         // this field is stored
-        if (!fieldMap.containsKey(field)) {
+        final int keySlot = fieldMap.indexOf(field);
+        if (keySlot < 0) {
             return null; // we don't have it.
         }
-        long readOffset = fieldMap.lget();
+        long readOffset = fieldMap.indexGet(keySlot);
         return new TermVector(termVectors, readOffset);
     }

@@ -20,11 +20,13 @@
 package org.elasticsearch.cluster.metadata;

 import com.carrotsearch.hppc.ObjectArrayList;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.base.Predicate;
 import com.google.common.collect.*;
+
+import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.*;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.DiffableUtils.KeyedReader;

@@ -295,7 +297,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {

         boolean matchAllAliases = matchAllAliases(aliases);
         ImmutableOpenMap.Builder<String, ImmutableList<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder();
-        Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+        Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
         for (String index : intersection) {
             IndexMetaData indexMetaData = indices.get(index);
             List<AliasMetaData> filteredValues = Lists.newArrayList();

@@ -307,6 +309,13 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
             }

             if (!filteredValues.isEmpty()) {
+                // Make the list order deterministic
+                CollectionUtil.timSort(filteredValues, new Comparator<AliasMetaData>() {
+                    @Override
+                    public int compare(AliasMetaData o1, AliasMetaData o2) {
+                        return o1.alias().compareTo(o2.alias());
+                    }
+                });
                 mapBuilder.put(index, ImmutableList.copyOf(filteredValues));
             }
         }

@@ -337,7 +346,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
             return false;
         }

-        Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+        Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
         for (String index : intersection) {
             IndexMetaData indexMetaData = indices.get(index);
             List<AliasMetaData> filteredValues = Lists.newArrayList();

@@ -368,7 +377,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
         }

         ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> indexMapBuilder = ImmutableOpenMap.builder();
-        Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+        Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
         for (String index : intersection) {
             IndexMetaData indexMetaData = indices.get(index);
             ImmutableOpenMap.Builder<String, MappingMetaData> filteredMappings;

@@ -400,7 +409,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
         final String[] warmers = Strings.isAllOrWildcard(uncheckedWarmers) ? Strings.EMPTY_ARRAY : uncheckedWarmers;

         ImmutableOpenMap.Builder<String, ImmutableList<IndexWarmersMetaData.Entry>> mapBuilder = ImmutableOpenMap.builder();
-        Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+        Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
         for (String index : intersection) {
             IndexMetaData indexMetaData = indices.get(index);
             IndexWarmersMetaData indexWarmersMetaData = indexMetaData.custom(IndexWarmersMetaData.TYPE);
@@ -19,7 +19,7 @@

 package org.elasticsearch.cluster.node;

-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.collect.ImmutableList;

@@ -334,7 +334,7 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
             }
             return nodesIds;
         } else {
-            ObjectOpenHashSet<String> resolvedNodesIds = new ObjectOpenHashSet<>(nodesIds.length);
+            ObjectHashSet<String> resolvedNodesIds = new ObjectHashSet<>(nodesIds.length);
             for (String nodeId : nodesIds) {
                 if (nodeId.equals("_local")) {
                     String localNodeId = localNodeId();
@@ -25,6 +25,8 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Sets;
 import com.google.common.collect.UnmodifiableIterator;
+
+import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;

@@ -35,11 +37,13 @@ import org.elasticsearch.index.shard.ShardId;

 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;

-import static com.google.common.collect.Lists.newArrayList;
+import static com.google.common.collect.Lists.*;

 /**
  * The {@link IndexRoutingTable} represents routing information for a single

@@ -540,7 +544,26 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple

     public String prettyPrint() {
         StringBuilder sb = new StringBuilder("-- index [" + index + "]\n");
+
+        List<IndexShardRoutingTable> ordered = new ArrayList<>();
         for (IndexShardRoutingTable indexShard : this) {
+            ordered.add(indexShard);
+        }
+
+        CollectionUtil.timSort(ordered, new Comparator<IndexShardRoutingTable>() {
+            @Override
+            public int compare(IndexShardRoutingTable o1, IndexShardRoutingTable o2) {
+                int v = o1.shardId().index().name().compareTo(
+                        o2.shardId().index().name());
+                if (v == 0) {
+                    v = Integer.compare(o1.shardId().id(),
+                            o2.shardId().id());
+                }
+                return v;
+            }
+        });
+
+        for (IndexShardRoutingTable indexShard : ordered) {
             sb.append("----shard_id [").append(indexShard.shardId().index().name()).append("][").append(indexShard.shardId().id()).append("]\n");
             for (ShardRouting shard : indexShard) {
                 sb.append("--------").append(shard.shortSummary()).append("\n");
@@ -19,7 +19,7 @@

 package org.elasticsearch.cluster.routing;

-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.base.Predicate;
 import com.google.common.collect.*;

@@ -64,7 +64,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {

     private Set<ShardId> clearPostAllocationFlag;

-    private final Map<String, ObjectIntOpenHashMap<String>> nodesPerAttributeNames = new HashMap<>();
+    private final Map<String, ObjectIntHashMap<String>> nodesPerAttributeNames = new HashMap<>();

     public RoutingNodes(ClusterState clusterState) {
         this.metaData = clusterState.metaData();

@@ -208,12 +208,12 @@ public class RoutingNodes implements Iterable<RoutingNode> {
         return nodesToShards.get(nodeId);
     }

-    public ObjectIntOpenHashMap<String> nodesPerAttributesCounts(String attributeName) {
-        ObjectIntOpenHashMap<String> nodesPerAttributesCounts = nodesPerAttributeNames.get(attributeName);
+    public ObjectIntHashMap<String> nodesPerAttributesCounts(String attributeName) {
+        ObjectIntHashMap<String> nodesPerAttributesCounts = nodesPerAttributeNames.get(attributeName);
         if (nodesPerAttributesCounts != null) {
             return nodesPerAttributesCounts;
         }
-        nodesPerAttributesCounts = new ObjectIntOpenHashMap<>();
+        nodesPerAttributesCounts = new ObjectIntHashMap<>();
         for (RoutingNode routingNode : this) {
             String attrValue = routingNode.node().attributes().get(attributeName);
             nodesPerAttributesCounts.addTo(attrValue, 1);
@@ -19,7 +19,7 @@

 package org.elasticsearch.cluster.routing.allocation.decider;

-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.google.common.collect.Maps;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.MutableShardRouting;

@@ -182,10 +182,10 @@ public class AwarenessAllocationDecider extends AllocationDecider {
         }

         // build attr_value -> nodes map
-        ObjectIntOpenHashMap<String> nodesPerAttribute = allocation.routingNodes().nodesPerAttributesCounts(awarenessAttribute);
+        ObjectIntHashMap<String> nodesPerAttribute = allocation.routingNodes().nodesPerAttributesCounts(awarenessAttribute);

         // build the count of shards per attribute value
-        ObjectIntOpenHashMap<String> shardPerAttribute = new ObjectIntOpenHashMap<>();
+        ObjectIntHashMap<String> shardPerAttribute = new ObjectIntHashMap<>();
         for (MutableShardRouting assignedShard : allocation.routingNodes().assignedShards(shardRouting)) {
             // if the shard is relocating, then make sure we count it as part of the node it is relocating to
             if (assignedShard.relocating()) {
@@ -20,7 +20,7 @@
 package org.elasticsearch.common;

 import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import org.elasticsearch.common.collect.ImmutableOpenMap;

 /**

@@ -28,7 +28,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
  */
 public class ContextHolder {

-    private ObjectObjectOpenHashMap<Object, Object> context;
+    private ObjectObjectHashMap<Object, Object> context;

     /**
      * Attaches the given value to the context.

@@ -39,7 +39,7 @@ public class ContextHolder {
     @SuppressWarnings("unchecked")
     public final synchronized <V> V putInContext(Object key, Object value) {
         if (context == null) {
-            context = new ObjectObjectOpenHashMap<>(2);
+            context = new ObjectObjectHashMap<>(2);
         }
         return (V) context.put(key, value);
     }

@@ -52,7 +52,7 @@ public class ContextHolder {
             return;
         }
         if (context == null) {
-            context = new ObjectObjectOpenHashMap<>(map);
+            context = new ObjectObjectHashMap<>(map);
         } else {
             context.putAll(map);
         }

@@ -120,7 +120,7 @@ public class ContextHolder {
             return;
         }
         if (context == null) {
-            context = new ObjectObjectOpenHashMap<>(other.context);
+            context = new ObjectObjectHashMap<>(other.context);
         } else {
             context.putAll(other.context);
         }
@@ -19,9 +19,9 @@

 package org.elasticsearch.common.collect;

-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.ObjectLookupContainer;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;

 import java.util.Iterator;

@@ -34,40 +34,50 @@ public final class HppcMaps {
     }

     /**
-     * Returns a new map with the given initial capacity
+     * Returns a new map with the given number of expected elements.
+     *
+     * @param expectedElements
+     *          The expected number of elements guaranteed not to cause buffer
+     *          expansion (inclusive).
      */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> newMap(int capacity) {
-        return new ObjectObjectOpenHashMap<>(capacity);
+    public static <K, V> ObjectObjectHashMap<K, V> newMap(int expectedElements) {
+        return new ObjectObjectHashMap<>(expectedElements);
     }

     /**
-     * Returns a new map with a default initial capacity of
-     * {@value com.carrotsearch.hppc.HashContainerUtils#DEFAULT_CAPACITY}
+     * Returns a new map with a default initial capacity.
      */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> newMap() {
+    public static <K, V> ObjectObjectHashMap<K, V> newMap() {
         return newMap(16);
     }

     /**
      * Returns a map like {@link #newMap()} that does not accept <code>null</code> keys
      */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> newNoNullKeysMap() {
+    public static <K, V> ObjectObjectHashMap<K, V> newNoNullKeysMap() {
         return ensureNoNullKeys(16);
     }

     /**
      * Returns a map like {@link #newMap(int)} that does not accept <code>null</code> keys
+     *
+     * @param expectedElements
+     *          The expected number of elements guaranteed not to cause buffer
+     *          expansion (inclusive).
      */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> newNoNullKeysMap(int capacity) {
-        return ensureNoNullKeys(capacity);
+    public static <K, V> ObjectObjectHashMap<K, V> newNoNullKeysMap(int expectedElements) {
+        return ensureNoNullKeys(expectedElements);
     }

     /**
      * Wraps the given map and prevent adding of <code>null</code> keys.
+     *
+     * @param expectedElements
+     *          The expected number of elements guaranteed not to cause buffer
+     *          expansion (inclusive).
      */
-    public static <K, V> ObjectObjectOpenHashMap<K, V> ensureNoNullKeys(int capacity) {
-        return new ObjectObjectOpenHashMap<K, V>(capacity) {
-
+    public static <K, V> ObjectObjectHashMap<K, V> ensureNoNullKeys(int expectedElements) {
+        return new ObjectObjectHashMap<K, V>(expectedElements) {
             @Override
             public V put(K key, V value) {
                 if (key == null) {

@@ -75,12 +85,11 @@ public final class HppcMaps {
                 }
                 return super.put(key, value);
             }
-
         };
     }

     /**
-     * @return an intersection view over the two specified containers (which can be KeyContainer or ObjectOpenHashSet).
+     * @return an intersection view over the two specified containers (which can be KeyContainer or ObjectHashSet).
      */
     // Hppc has forEach, but this means we need to build an intermediate set, with this method we just iterate
     // over each unique value without creating a third set.

@@ -124,12 +133,9 @@ public final class HppcMaps {
     }

     public final static class Object {
-
         public final static class Integer {
-
-            public static <V> ObjectIntOpenHashMap<V> ensureNoNullKeys(int capacity, float loadFactor) {
-                return new ObjectIntOpenHashMap<V>(capacity, loadFactor) {
-
+            public static <V> ObjectIntHashMap<V> ensureNoNullKeys(int capacity, float loadFactor) {
+                return new ObjectIntHashMap<V>(capacity, loadFactor) {
                     @Override
                     public int put(V key, int value) {
                         if (key == null) {

@@ -139,9 +145,6 @@ public final class HppcMaps {
                     }
                 };
             }
-
         }
-
     }
-
 }

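Note: besides the class renames, hppc 0.7 sizes its containers by an expected element count rather than a raw bucket capacity, which is why the parameter and javadoc above now speak of expectedElements. A hedged usage sketch of the null-key guard that HppcMaps builds (the exact exception type comes from the throw elided in the hunk above and is only assumed here to be a runtime exception):

    import com.carrotsearch.hppc.ObjectObjectHashMap;
    import org.elasticsearch.common.collect.HppcMaps;

    public class NoNullKeysExample {
        public static void main(String[] args) {
            // Sized so roughly 100 entries fit without an intermediate buffer expansion.
            ObjectObjectHashMap<String, String> map = HppcMaps.newNoNullKeysMap(100);
            map.put("index", "twitter");
            try {
                map.put(null, "boom"); // the overridden put() rejects null keys
            } catch (RuntimeException e) {
                System.out.println("null keys are not allowed: " + e);
            }
        }
    }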
@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.*;
 import com.carrotsearch.hppc.cursors.IntCursor;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
+import com.carrotsearch.hppc.predicates.IntObjectPredicate;
 import com.carrotsearch.hppc.predicates.IntPredicate;
 import com.carrotsearch.hppc.procedures.IntObjectProcedure;
 import com.google.common.collect.UnmodifiableIterator;

@@ -38,9 +39,9 @@ import java.util.Map;
  */
 public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCursor<VType>> {

-    private final IntObjectOpenHashMap<VType> map;
+    private final IntObjectHashMap<VType> map;

-    private ImmutableOpenIntMap(IntObjectOpenHashMap<VType> map) {
+    private ImmutableOpenIntMap(IntObjectHashMap<VType> map) {
         this.map = map;
     }

@@ -175,7 +176,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
     }

     @SuppressWarnings("unchecked")
-    private static final ImmutableOpenIntMap EMPTY = new ImmutableOpenIntMap(new IntObjectOpenHashMap());
+    private static final ImmutableOpenIntMap EMPTY = new ImmutableOpenIntMap(new IntObjectHashMap());

     @SuppressWarnings("unchecked")
     public static <VType> ImmutableOpenIntMap<VType> of() {

@@ -196,7 +197,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso

     public static class Builder<VType> implements IntObjectMap<VType> {

-        private IntObjectOpenHashMap<VType> map;
+        private IntObjectHashMap<VType> map;

         public Builder() {
             //noinspection unchecked

@@ -204,7 +205,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
         }

         public Builder(int size) {
-            this.map = new IntObjectOpenHashMap<>(size);
+            this.map = new IntObjectHashMap<>(size);
         }

         public Builder(ImmutableOpenIntMap<VType> map) {

@@ -215,7 +216,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
          * Builds a new instance of the
          */
         public ImmutableOpenIntMap<VType> build() {
-            IntObjectOpenHashMap<VType> map = this.map;
+            IntObjectHashMap<VType> map = this.map;
             this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest)
             return new ImmutableOpenIntMap<>(map);
         }

@@ -325,5 +326,50 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
         public ObjectContainer<VType> values() {
             return map.values();
         }
+
+        @Override
+        public int removeAll(IntObjectPredicate<? super VType> predicate) {
+            return map.removeAll(predicate);
+        }
+
+        @Override
+        public <T extends IntObjectPredicate<? super VType>> T forEach(T predicate) {
+            return map.forEach(predicate);
+        }
+
+        @Override
+        public int indexOf(int key) {
+            return map.indexOf(key);
+        }
+
+        @Override
+        public boolean indexExists(int index) {
+            return map.indexExists(index);
+        }
+
+        @Override
+        public VType indexGet(int index) {
+            return map.indexGet(index);
+        }
+
+        @Override
+        public VType indexReplace(int index, VType newValue) {
+            return map.indexReplace(index, newValue);
+        }
+
+        @Override
+        public void indexInsert(int index, int key, VType value) {
+            map.indexInsert(index, key, value);
+        }
+
+        @Override
+        public void release() {
+            map.release();
+        }
+
+        @Override
+        public String visualizeKeyDistribution(int characters) {
+            return map.visualizeKeyDistribution(characters);
+        }
     }
 }
@@ -23,6 +23,8 @@ import com.carrotsearch.hppc.*;
 import com.carrotsearch.hppc.cursors.LongCursor;
 import com.carrotsearch.hppc.cursors.LongObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
+import com.carrotsearch.hppc.predicates.IntObjectPredicate;
+import com.carrotsearch.hppc.predicates.LongObjectPredicate;
 import com.carrotsearch.hppc.predicates.LongPredicate;
 import com.carrotsearch.hppc.procedures.LongObjectProcedure;
 import com.google.common.collect.UnmodifiableIterator;

@@ -38,9 +40,9 @@ import java.util.Map;
  */
 public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCursor<VType>> {

-    private final LongObjectOpenHashMap<VType> map;
+    private final LongObjectHashMap<VType> map;

-    private ImmutableOpenLongMap(LongObjectOpenHashMap<VType> map) {
+    private ImmutableOpenLongMap(LongObjectHashMap<VType> map) {
         this.map = map;
     }

@@ -175,7 +177,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
     }

     @SuppressWarnings("unchecked")
-    private static final ImmutableOpenLongMap EMPTY = new ImmutableOpenLongMap(new LongObjectOpenHashMap());
+    private static final ImmutableOpenLongMap EMPTY = new ImmutableOpenLongMap(new LongObjectHashMap());

     @SuppressWarnings("unchecked")
     public static <VType> ImmutableOpenLongMap<VType> of() {

@@ -196,7 +198,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur

     public static class Builder<VType> implements LongObjectMap<VType> {

-        private LongObjectOpenHashMap<VType> map;
+        private LongObjectHashMap<VType> map;

         public Builder() {
             //noinspection unchecked

@@ -204,7 +206,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
         }

         public Builder(int size) {
-            this.map = new LongObjectOpenHashMap<>(size);
+            this.map = new LongObjectHashMap<>(size);
         }

         public Builder(ImmutableOpenLongMap<VType> map) {

@@ -215,7 +217,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
          * Builds a new instance of the
          */
         public ImmutableOpenLongMap<VType> build() {
-            LongObjectOpenHashMap<VType> map = this.map;
+            LongObjectHashMap<VType> map = this.map;
             this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest)
             return new ImmutableOpenLongMap<>(map);
         }

@@ -311,11 +313,6 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
         return map.removeAll(predicate);
     }

-    @Override
-    public <T extends LongObjectProcedure<? super VType>> T forEach(T procedure) {
-        return map.forEach(procedure);
-    }
-
     @Override
     public LongCollection keys() {
         return map.keys();

@@ -325,5 +322,55 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
         public ObjectContainer<VType> values() {
             return map.values();
         }
+
+        @Override
+        public <T extends LongObjectProcedure<? super VType>> T forEach(T procedure) {
+            return map.forEach(procedure);
+        }
+
+        @Override
+        public int indexOf(long key) {
+            return map.indexOf(key);
+        }
+
+        @Override
+        public boolean indexExists(int index) {
+            return map.indexExists(index);
+        }
+
+        @Override
+        public VType indexGet(int index) {
+            return map.indexGet(index);
+        }
+
+        @Override
+        public VType indexReplace(int index, VType newValue) {
+            return map.indexReplace(index, newValue);
+        }
+
+        @Override
+        public void indexInsert(int index, long key, VType value) {
+            map.indexInsert(index, key, value);
+        }
+
+        @Override
+        public void release() {
+            map.release();
+        }
+
+        @Override
+        public String visualizeKeyDistribution(int characters) {
+            return map.visualizeKeyDistribution(characters);
+        }
+
+        @Override
+        public int removeAll(LongObjectPredicate<? super VType> predicate) {
+            return map.removeAll(predicate);
+        }
+
+        @Override
+        public <T extends LongObjectPredicate<? super VType>> T forEach(T predicate) {
+            return map.forEach(predicate);
+        }
     }
 }
@@ -22,6 +22,7 @@ package org.elasticsearch.common.collect;
 import com.carrotsearch.hppc.*;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import com.carrotsearch.hppc.predicates.ObjectObjectPredicate;
 import com.carrotsearch.hppc.predicates.ObjectPredicate;
 import com.carrotsearch.hppc.procedures.ObjectObjectProcedure;
 import com.google.common.collect.UnmodifiableIterator;

@@ -37,9 +38,9 @@ import java.util.Map;
  */
 public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObjectCursor<KType, VType>> {

-    private final ObjectObjectOpenHashMap<KType, VType> map;
+    private final ObjectObjectHashMap<KType, VType> map;

-    private ImmutableOpenMap(ObjectObjectOpenHashMap<KType, VType> map) {
+    private ImmutableOpenMap(ObjectObjectHashMap<KType, VType> map) {
         this.map = map;
     }

@@ -182,7 +183,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
     }

     @SuppressWarnings("unchecked")
-    private static final ImmutableOpenMap EMPTY = new ImmutableOpenMap(new ObjectObjectOpenHashMap());
+    private static final ImmutableOpenMap EMPTY = new ImmutableOpenMap(new ObjectObjectHashMap());

     @SuppressWarnings("unchecked")
     public static <KType, VType> ImmutableOpenMap<KType, VType> of() {

@@ -211,8 +212,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
     }

     public static class Builder<KType, VType> implements ObjectObjectMap<KType, VType> {
-
-        private ObjectObjectOpenHashMap<KType, VType> map;
+        private ObjectObjectHashMap<KType, VType> map;

         public Builder() {
             //noinspection unchecked

@@ -220,7 +220,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
         }

         public Builder(int size) {
-            this.map = new ObjectObjectOpenHashMap<>(size);
+            this.map = new ObjectObjectHashMap<>(size);
         }

         public Builder(ImmutableOpenMap<KType, VType> map) {

@@ -231,11 +231,13 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
          * Builds a new instance of the
          */
         public ImmutableOpenMap<KType, VType> build() {
-            ObjectObjectOpenHashMap<KType, VType> map = this.map;
+            ObjectObjectHashMap<KType, VType> map = this.map;
             this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest)
             return new ImmutableOpenMap<>(map);
         }

+
+
         /**
          * Puts all the entries in the map to the builder.
         */

@@ -313,7 +315,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
         }

         @Override
-        public int removeAll(ObjectContainer<? extends KType> container) {
+        public int removeAll(ObjectContainer<? super KType> container) {
             return map.removeAll(container);
         }

@@ -347,5 +349,49 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
             return (Builder) this;
         }

+        @Override
+        public int removeAll(ObjectObjectPredicate<? super KType, ? super VType> predicate) {
+            return map.removeAll(predicate);
+        }
+
+        @Override
+        public <T extends ObjectObjectPredicate<? super KType, ? super VType>> T forEach(T predicate) {
+            return map.forEach(predicate);
+        }
+
+        @Override
+        public int indexOf(KType key) {
+            return map.indexOf(key);
+        }
+
+        @Override
+        public boolean indexExists(int index) {
+            return map.indexExists(index);
+        }
+
+        @Override
+        public VType indexGet(int index) {
+            return map.indexGet(index);
+        }
+
+        @Override
+        public VType indexReplace(int index, VType newValue) {
+            return map.indexReplace(index, newValue);
+        }
+
+        @Override
+        public void indexInsert(int index, KType key, VType value) {
+            map.indexInsert(index, key, value);
+        }
+
+        @Override
+        public void release() {
+            map.release();
+        }
+
+        @Override
+        public String visualizeKeyDistribution(int characters) {
+            return map.visualizeKeyDistribution(characters);
+        }
     }
 }
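Note: the Builder classes above implement hppc 0.7's widened map interfaces, so they must now expose the slot-based accessors (indexOf/indexExists/indexGet/indexReplace/indexInsert) plus release() and visualizeKeyDistribution(), all of which simply delegate to the backing hash map. A small hedged sketch of using that delegation on ImmutableOpenMap.Builder (illustrative only):

    import org.elasticsearch.common.collect.ImmutableOpenMap;

    public class BuilderIndexAccessExample {
        public static void main(String[] args) {
            ImmutableOpenMap.Builder<String, Integer> builder = ImmutableOpenMap.builder();
            builder.put("shards", 5);

            int slot = builder.indexOf("shards");      // delegated to the ObjectObjectHashMap
            if (builder.indexExists(slot)) {
                builder.indexReplace(slot, builder.indexGet(slot) + 1);
            }

            ImmutableOpenMap<String, Integer> map = builder.build();
            System.out.println(map.get("shards"));     // 6
        }
    }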
@@ -19,7 +19,7 @@

 package org.elasticsearch.common.lucene.search;

-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;

 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;

@@ -149,7 +149,7 @@ public class MultiPhrasePrefixQuery extends Query {
         }
         Term[] suffixTerms = termArrays.get(sizeMinus1);
         int position = positions.get(sizeMinus1);
-        ObjectOpenHashSet<Term> terms = new ObjectOpenHashSet<>();
+        ObjectHashSet<Term> terms = new ObjectHashSet<>();
         for (Term term : suffixTerms) {
             getPrefixTerms(terms, term, reader);
             if (terms.size() > maxExpansions) {

@@ -163,7 +163,7 @@ public class MultiPhrasePrefixQuery extends Query {
         return query.rewrite(reader);
     }

-    private void getPrefixTerms(ObjectOpenHashSet<Term> terms, final Term prefix, final IndexReader reader) throws IOException {
+    private void getPrefixTerms(ObjectHashSet<Term> terms, final Term prefix, final IndexReader reader) throws IOException {
         // SlowCompositeReaderWrapper could be used... but this would merge all terms from each segment into one terms
         // instance, which is very expensive. Therefore I think it is better to iterate over each leaf individually.
         List<LeafReaderContext> leaves = reader.leaves();
@@ -19,7 +19,7 @@

 package org.elasticsearch.common.recycler;

-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
 import com.google.common.collect.Queues;
 import org.elasticsearch.ElasticsearchException;

@@ -173,7 +173,7 @@ public enum Recyclers {
         final int slot() {
             final long id = Thread.currentThread().getId();
             // don't trust Thread.hashCode to have equiprobable low bits
-            int slot = (int) MurmurHash3.hash(id);
+            int slot = (int) BitMixer.mix64(id);
             // make positive, otherwise % may return negative numbers
             slot &= 0x7FFFFFFF;
             slot %= concurrencyLevel;
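Note: hppc 0.7 also dropped the com.carrotsearch.hppc.hash.MurmurHash3 helper; the equivalent finalization/mixing functions now live on com.carrotsearch.hppc.BitMixer (mix32/mix64), which is what this and the following hashing call sites switch to. A minimal sketch of the substitution:

    import com.carrotsearch.hppc.BitMixer;

    public class BitMixerExample {
        public static void main(String[] args) {
            long threadId = Thread.currentThread().getId();

            // hppc 0.6.x: int slot = (int) MurmurHash3.hash(threadId);
            int slot = (int) BitMixer.mix64(threadId); // hppc 0.7.x equivalent
            slot &= 0x7FFFFFFF;                        // clear the sign bit so % stays non-negative
            System.out.println(slot % 8);              // e.g. pick one of 8 striped pools
        }
    }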
@@ -19,7 +19,7 @@

 package org.elasticsearch.common.util;

-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
 import com.google.common.base.Preconditions;
 import org.elasticsearch.common.lease.Releasable;

@@ -35,7 +35,7 @@ abstract class AbstractPagedHashMap implements Releasable {
     static long hash(long value) {
         // Don't use the value directly. Under some cases eg dates, it could be that the low bits don't carry much value and we would like
         // all bits of the hash to carry as much value
-        return MurmurHash3.hash(value);
+        return BitMixer.mix64(value);
     }

     static long hash(double value) {
@@ -19,7 +19,8 @@

 package org.elasticsearch.common.util;

-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
+
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;

@@ -56,7 +57,7 @@ public final class BytesRefHash extends AbstractHash {
     // BytesRef has a weak hashCode function so we try to improve it by rehashing using Murmur3
     // Feel free to remove rehashing if BytesRef gets a better hash function
     private static int rehash(int hash) {
-        return MurmurHash3.hash(hash);
+        return BitMixer.mix32(hash);
     }

     /**
@@ -19,8 +19,8 @@

 package org.elasticsearch.gateway;

-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.action.FailedNodeException;

@@ -68,7 +68,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
     }

     public void performStateRecovery(final GatewayStateRecoveredListener listener) throws GatewayException {
-        ObjectOpenHashSet<String> nodesIds = ObjectOpenHashSet.from(clusterService.state().nodes().masterNodes().keys());
+        ObjectHashSet<String> nodesIds = new ObjectHashSet<>(clusterService.state().nodes().masterNodes().keys());
         logger.trace("performing state recovery from {}", nodesIds);
         TransportNodesListGatewayMetaState.NodesGatewayMetaState nodesState = listGatewayMetaState.list(nodesIds.toArray(String.class), null).actionGet();

@@ -104,7 +104,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
             }
         }

-        ObjectFloatOpenHashMap<String> indices = new ObjectFloatOpenHashMap<>();
+        ObjectFloatHashMap<String> indices = new ObjectFloatHashMap<>();
         MetaData electedGlobalState = null;
         int found = 0;
         for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) {

@@ -127,10 +127,11 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
         }
         // update the global state, and clean the indices, we elect them in the next phase
         MetaData.Builder metaDataBuilder = MetaData.builder(electedGlobalState).removeAllIndices();
-        final boolean[] states = indices.allocated;
+
+        assert !indices.containsKey(null);
         final Object[] keys = indices.keys;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
+        for (int i = 0; i < keys.length; i++) {
+            if (keys[i] != null) {
                 String index = (String) keys[i];
                 IndexMetaData electedIndexMetaData = null;
                 int indexMetaDataCount = 0;
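Note: the Gateway hunk above and the GatewayAllocator and FieldDataStats hunks below all deal with the removal of the public allocated[] slot array in hppc 0.7: callers now walk the keys array directly and treat a null slot as empty, which is only safe because these maps forbid null keys (hence the added asserts). A hedged, stand-alone sketch of the iteration pattern:

    import com.carrotsearch.hppc.ObjectFloatHashMap;

    public class KeysArrayIterationExample {
        public static void main(String[] args) {
            ObjectFloatHashMap<String> indices = new ObjectFloatHashMap<>();
            indices.put("twitter", 2f);
            indices.put("logs", 1f);

            // hppc 0.6.x: if (map.allocated[i]) { ... }   -- allocated[] no longer exists
            assert !indices.containsKey(null); // a null key now marks an empty slot
            final Object[] keys = indices.keys;
            final float[] values = indices.values;
            for (int i = 0; i < keys.length; i++) {
                if (keys[i] != null) {
                    System.out.println(keys[i] + " -> " + values[i]);
                }
            }
            // For most new code the cursor-based for-each is simpler:
            // for (ObjectFloatCursor<String> c : indices) { ... c.key ... c.value ... }
        }
    }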
@@ -19,8 +19,8 @@

 package org.elasticsearch.gateway;

-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectLongHashMap;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.predicates.ObjectPredicate;
 import com.google.common.collect.Maps;

@@ -68,7 +68,7 @@ public class GatewayAllocator extends AbstractComponent {

     private final ConcurrentMap<ShardId, Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData>> cachedStores = ConcurrentCollections.newConcurrentMap();

-    private final ConcurrentMap<ShardId, ObjectLongOpenHashMap<DiscoveryNode>> cachedShardsState = ConcurrentCollections.newConcurrentMap();
+    private final ConcurrentMap<ShardId, ObjectLongHashMap<DiscoveryNode>> cachedShardsState = ConcurrentCollections.newConcurrentMap();

     private final TimeValue listTimeout;

@@ -121,16 +121,17 @@ public class GatewayAllocator extends AbstractComponent {
                 continue;
             }

-            ObjectLongOpenHashMap<DiscoveryNode> nodesState = buildShardStates(nodes, shard, metaData.index(shard.index()));
+            ObjectLongHashMap<DiscoveryNode> nodesState = buildShardStates(nodes, shard, metaData.index(shard.index()));

             int numberOfAllocationsFound = 0;
             long highestVersion = -1;
             Set<DiscoveryNode> nodesWithHighestVersion = Sets.newHashSet();
-            final boolean[] states = nodesState.allocated;
+
+            assert !nodesState.containsKey(null);
             final Object[] keys = nodesState.keys;
             final long[] values = nodesState.values;
-            for (int i = 0; i < states.length; i++) {
-                if (!states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] == null) {
                     continue;
                 }

@@ -380,13 +381,13 @@ public class GatewayAllocator extends AbstractComponent {
     * A shard on shared storage will return at least shard state 0 for all
     * nodes, indicating that the shard can be allocated to any node.
     */
-    private ObjectLongOpenHashMap<DiscoveryNode> buildShardStates(final DiscoveryNodes nodes, MutableShardRouting shard, IndexMetaData indexMetaData) {
-        ObjectLongOpenHashMap<DiscoveryNode> shardStates = cachedShardsState.get(shard.shardId());
-        ObjectOpenHashSet<String> nodeIds;
+    private ObjectLongHashMap<DiscoveryNode> buildShardStates(final DiscoveryNodes nodes, MutableShardRouting shard, IndexMetaData indexMetaData) {
+        ObjectLongHashMap<DiscoveryNode> shardStates = cachedShardsState.get(shard.shardId());
+        ObjectHashSet<String> nodeIds;
         if (shardStates == null) {
-            shardStates = new ObjectLongOpenHashMap<>();
+            shardStates = new ObjectLongHashMap<>();
             cachedShardsState.put(shard.shardId(), shardStates);
-            nodeIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
+            nodeIds = new ObjectHashSet<>(nodes.dataNodes().keys());
         } else {
             // clean nodes that have failed
             shardStates.keys().removeAll(new ObjectPredicate<DiscoveryNode>() {

@@ -395,7 +396,7 @@ public class GatewayAllocator extends AbstractComponent {
                     return !nodes.nodeExists(node.id());
                 }
             });
-            nodeIds = ObjectOpenHashSet.newInstance();
+            nodeIds = new ObjectHashSet<>();
             // we have stored cached from before, see if the nodes changed, if they have, go fetch again
             for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
                 DiscoveryNode node = cursor.value;

@@ -442,13 +443,13 @@ public class GatewayAllocator extends AbstractComponent {

     private Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> buildShardStores(DiscoveryNodes nodes, MutableShardRouting shard) {
         Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> shardStores = cachedStores.get(shard.shardId());
-        ObjectOpenHashSet<String> nodesIds;
+        ObjectHashSet<String> nodesIds;
         if (shardStores == null) {
             shardStores = Maps.newHashMap();
             cachedStores.put(shard.shardId(), shardStores);
-            nodesIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
+            nodesIds = new ObjectHashSet<>(nodes.dataNodes().keys());
         } else {
-            nodesIds = ObjectOpenHashSet.newInstance();
+            nodesIds = new ObjectHashSet<>();
             // clean nodes that have failed
             for (Iterator<DiscoveryNode> it = shardStores.keySet().iterator(); it.hasNext(); ) {
                 DiscoveryNode node = it.next();
@@ -19,7 +19,7 @@

 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
 import com.google.common.collect.Maps;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.joda.time.format.DateTimeFormatter;

@@ -32,12 +32,12 @@ import java.util.Map;
  */
 public class NumericDateAnalyzer extends NumericAnalyzer<NumericDateTokenizer> {

-    private static final Map<String, IntObjectOpenHashMap<NamedAnalyzer>> globalAnalyzers = Maps.newHashMap();
+    private static final Map<String, IntObjectHashMap<NamedAnalyzer>> globalAnalyzers = Maps.newHashMap();

     public static synchronized NamedAnalyzer buildNamedAnalyzer(FormatDateTimeFormatter formatter, int precisionStep) {
-        IntObjectOpenHashMap<NamedAnalyzer> precisionMap = globalAnalyzers.get(formatter.format());
+        IntObjectHashMap<NamedAnalyzer> precisionMap = globalAnalyzers.get(formatter.format());
         if (precisionMap == null) {
-            precisionMap = new IntObjectOpenHashMap<>();
+            precisionMap = new IntObjectHashMap<>();
             globalAnalyzers.put(formatter.format(), precisionMap);
         }
         NamedAnalyzer namedAnalyzer = precisionMap.get(precisionStep);
@@ -19,7 +19,7 @@

 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;

 import java.io.IOException;

@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericDoubleAnalyzer extends NumericAnalyzer<NumericDoubleTokenizer> {

-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;

     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_double/max", AnalyzerScope.GLOBAL, new NumericDoubleAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_double/" + i, AnalyzerScope.GLOBAL, new NumericDoubleAnalyzer(i)));
@@ -19,7 +19,7 @@

 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;

 import java.io.IOException;

@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericFloatAnalyzer extends NumericAnalyzer<NumericFloatTokenizer> {

-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;

     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_float/max", AnalyzerScope.GLOBAL, new NumericFloatAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_float/" + i, AnalyzerScope.GLOBAL, new NumericFloatAnalyzer(i)));
@@ -19,7 +19,7 @@

 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;

 import java.io.IOException;

@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericIntegerAnalyzer extends NumericAnalyzer<NumericIntegerTokenizer> {

-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;

     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_int/max", AnalyzerScope.GLOBAL, new NumericIntegerAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_int/" + i, AnalyzerScope.GLOBAL, new NumericIntegerAnalyzer(i)));
@@ -19,7 +19,7 @@

 package org.elasticsearch.index.analysis;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;

 import java.io.IOException;

@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericLongAnalyzer extends NumericAnalyzer<NumericLongTokenizer> {

-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;

     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_long/max", AnalyzerScope.GLOBAL, new NumericLongAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_long/" + i, AnalyzerScope.GLOBAL, new NumericLongAnalyzer(i)));
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.fielddata;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectLongHashMap;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -38,13 +38,13 @@ public class FieldDataStats implements Streamable, ToXContent {
|
|||
long memorySize;
|
||||
long evictions;
|
||||
@Nullable
|
||||
ObjectLongOpenHashMap<String> fields;
|
||||
ObjectLongHashMap<String> fields;
|
||||
|
||||
public FieldDataStats() {
|
||||
|
||||
}
|
||||
|
||||
public FieldDataStats(long memorySize, long evictions, @Nullable ObjectLongOpenHashMap<String> fields) {
|
||||
public FieldDataStats(long memorySize, long evictions, @Nullable ObjectLongHashMap<String> fields) {
|
||||
this.memorySize = memorySize;
|
||||
this.evictions = evictions;
|
||||
this.fields = fields;
|
||||
|
@ -54,17 +54,20 @@ public class FieldDataStats implements Streamable, ToXContent {
|
|||
this.memorySize += stats.memorySize;
|
||||
this.evictions += stats.evictions;
|
||||
if (stats.fields != null) {
|
||||
if (fields == null) fields = new ObjectLongOpenHashMap<>();
|
||||
final boolean[] states = stats.fields.allocated;
|
||||
if (fields == null) {
|
||||
fields = stats.fields.clone();
|
||||
} else {
|
||||
assert !stats.fields.containsKey(null);
|
||||
final Object[] keys = stats.fields.keys;
|
||||
final long[] values = stats.fields.values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (keys[i] != null) {
|
||||
fields.addTo((String) keys[i], values[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public long getMemorySizeInBytes() {
|
||||
return this.memorySize;
|
||||
|
@ -79,7 +82,7 @@ public class FieldDataStats implements Streamable, ToXContent {
|
|||
}
|
||||
|
||||
@Nullable
|
||||
public ObjectLongOpenHashMap<String> getFields() {
|
||||
public ObjectLongHashMap<String> getFields() {
|
||||
return fields;
|
||||
}
|
||||
|
||||
|
@ -95,7 +98,7 @@ public class FieldDataStats implements Streamable, ToXContent {
|
|||
evictions = in.readVLong();
|
||||
if (in.readBoolean()) {
|
||||
int size = in.readVInt();
|
||||
fields = new ObjectLongOpenHashMap<>(size);
|
||||
fields = new ObjectLongHashMap<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
fields.put(in.readString(), in.readVLong());
|
||||
}
|
||||
|
@ -111,11 +114,11 @@ public class FieldDataStats implements Streamable, ToXContent {
|
|||
} else {
|
||||
out.writeBoolean(true);
|
||||
out.writeVInt(fields.size());
|
||||
final boolean[] states = fields.allocated;
|
||||
assert !fields.containsKey(null);
|
||||
final Object[] keys = fields.keys;
|
||||
final long[] values = fields.values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (keys[i] != null) {
|
||||
out.writeString((String) keys[i]);
|
||||
out.writeVLong(values[i]);
|
||||
}
|
||||
|
@ -130,11 +133,11 @@ public class FieldDataStats implements Streamable, ToXContent {
|
|||
builder.field(Fields.EVICTIONS, getEvictions());
|
||||
if (fields != null) {
|
||||
builder.startObject(Fields.FIELDS);
|
||||
final boolean[] states = fields.allocated;
|
||||
assert !fields.containsKey(null);
|
||||
final Object[] keys = fields.keys;
|
||||
final long[] values = fields.values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (keys[i] != null) {
|
||||
builder.startObject((String) keys[i], XContentBuilder.FieldCaseConversion.NONE);
|
||||
builder.byteSizeField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, values[i]);
|
||||
builder.endObject();
|
||||
|
|
|
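FieldDataStats walks the map's backing arrays directly for serialization and XContent output. HPPC 0.7 removed the `allocated` array; for Object keys an empty slot is now simply a `null` entry in `keys`, which is what the rewritten loops test, and why the code asserts that the map never holds a null key. A minimal sketch of that raw-array walk, with hypothetical field names and sizes:

    import com.carrotsearch.hppc.ObjectLongHashMap;

    public class RawArrayWalkSketch {
        public static void main(String[] args) {
            ObjectLongHashMap<String> sizes = new ObjectLongHashMap<>();
            sizes.put("title", 1024L);
            sizes.put("body", 4096L);

            assert !sizes.containsKey(null); // a null key would be skipped below
            final Object[] keys = sizes.keys;
            final long[] values = sizes.values;
            for (int i = 0; i < keys.length; i++) {
                if (keys[i] != null) {            // replaces the old allocated[i] check
                    System.out.println(keys[i] + " -> " + values[i]);
                }
            }
        }
    }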
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.fielddata;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectLongHashMap;
|
||||
import org.apache.lucene.util.Accountable;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.metrics.CounterMetric;
|
||||
|
@ -50,9 +50,9 @@ public class ShardFieldData extends AbstractIndexShardComponent implements Index
|
|||
}
|
||||
|
||||
public FieldDataStats stats(String... fields) {
|
||||
ObjectLongOpenHashMap<String> fieldTotals = null;
|
||||
ObjectLongHashMap<String> fieldTotals = null;
|
||||
if (fields != null && fields.length > 0) {
|
||||
fieldTotals = new ObjectLongOpenHashMap<>();
|
||||
fieldTotals = new ObjectLongHashMap<>();
|
||||
for (Map.Entry<String, CounterMetric> entry : perFieldTotals.entrySet()) {
|
||||
if (Regex.simpleMatch(fields, entry.getKey())) {
|
||||
fieldTotals.put(entry.getKey(), entry.getValue().count());
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.fielddata.plain;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
import com.google.common.collect.ImmutableSortedSet;
|
||||
|
||||
|
@ -132,7 +132,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
|
|||
);
|
||||
ParentChildEstimator estimator = new ParentChildEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA), termsEnum);
|
||||
TermsEnum estimatedTermsEnum = estimator.beforeLoad(null);
|
||||
ObjectObjectOpenHashMap<String, TypeBuilder> typeBuilders = ObjectObjectOpenHashMap.newInstance();
|
||||
ObjectObjectHashMap<String, TypeBuilder> typeBuilders = new ObjectObjectHashMap<>();
|
||||
try {
|
||||
try {
|
||||
PostingsEnum docsEnum = null;
|
||||
|
|
|
@@ -19,7 +19,7 @@
package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import com.carrotsearch.hppc.ObjectHashSet;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

@@ -79,7 +79,7 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
public class MapperService extends AbstractIndexComponent {
public static final String DEFAULT_MAPPING = "_default_";
private static ObjectOpenHashSet<String> META_FIELDS = ObjectOpenHashSet.from(
private static ObjectHashSet<String> META_FIELDS = ObjectHashSet.from(
"_uid", "_id", "_type", "_all", "_parent", "_routing", "_index",
"_size", "_timestamp", "_ttl"
);
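META_FIELDS keeps its varargs factory; only the class name changes, since `from(...)` exists on the renamed `ObjectHashSet` as well. A small sketch (the set contents here are placeholders, not the full meta-field list):

    import com.carrotsearch.hppc.ObjectHashSet;

    public class FromFactorySketch {
        // Build a lookup set in one expression, as MapperService does for its meta fields.
        private static final ObjectHashSet<String> META_FIELDS =
                ObjectHashSet.from("_uid", "_id", "_type");

        public static void main(String[] args) {
            System.out.println(META_FIELDS.contains("_id"));   // true
            System.out.println(META_FIELDS.contains("title")); // false
        }
    }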
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectObjectMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import org.apache.lucene.document.Field;
|
||||
|
@ -106,7 +106,7 @@ public abstract class ParseContext {
|
|||
/** Add fields so that they can later be fetched using {@link #getByKey(Object)}. */
|
||||
public void addWithKey(Object key, IndexableField field) {
|
||||
if (keyedFields == null) {
|
||||
keyedFields = new ObjectObjectOpenHashMap<>();
|
||||
keyedFields = new ObjectObjectHashMap<>();
|
||||
} else if (keyedFields.containsKey(key)) {
|
||||
throw new IllegalStateException("Only one field can be stored per key");
|
||||
}
|
||||
|
|
|
@@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.base.Function;

@@ -19,7 +19,9 @@
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.DoubleHashSet;
import com.carrotsearch.hppc.LongArrayList;
import com.carrotsearch.hppc.LongHashSet;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.NumericTokenStream;
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.mapper.geo;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.google.common.base.Objects;
|
||||
import com.google.common.collect.Iterators;
|
||||
|
@ -748,11 +748,11 @@ public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implement
|
|||
TYPE.freeze();
|
||||
}
|
||||
|
||||
private final ObjectOpenHashSet<GeoPoint> points;
|
||||
private final ObjectHashSet<GeoPoint> points;
|
||||
|
||||
public CustomGeoPointDocValuesField(String name, double lat, double lon) {
|
||||
super(name);
|
||||
points = new ObjectOpenHashSet<>(2);
|
||||
points = new ObjectHashSet<>(2);
|
||||
points.add(new GeoPoint(lat, lon));
|
||||
}
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectFloatHashMap;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
|
@ -41,7 +41,7 @@ public class MultiMatchQueryBuilder extends BaseQueryBuilder implements Boostabl
|
|||
private final Object text;
|
||||
|
||||
private final List<String> fields;
|
||||
private ObjectFloatOpenHashMap<String> fieldsBoosts;
|
||||
private ObjectFloatHashMap<String> fieldsBoosts;
|
||||
|
||||
private MultiMatchQueryBuilder.Type type;
|
||||
|
||||
|
@ -176,7 +176,7 @@ public class MultiMatchQueryBuilder extends BaseQueryBuilder implements Boostabl
|
|||
public MultiMatchQueryBuilder field(String field, float boost) {
|
||||
fields.add(field);
|
||||
if (fieldsBoosts == null) {
|
||||
fieldsBoosts = new ObjectFloatOpenHashMap<>();
|
||||
fieldsBoosts = new ObjectFloatHashMap<>();
|
||||
}
|
||||
fieldsBoosts.put(field, boost);
|
||||
return this;
|
||||
|
@ -336,8 +336,9 @@ public class MultiMatchQueryBuilder extends BaseQueryBuilder implements Boostabl
|
|||
builder.field("query", text);
|
||||
builder.startArray("fields");
|
||||
for (String field : fields) {
|
||||
if (fieldsBoosts != null && fieldsBoosts.containsKey(field)) {
|
||||
field += "^" + fieldsBoosts.lget();
|
||||
final int keySlot;
|
||||
if (fieldsBoosts != null && ((keySlot = fieldsBoosts.indexOf(field)) >= 0)) {
|
||||
field += "^" + fieldsBoosts.indexGet(keySlot);
|
||||
}
|
||||
builder.value(field);
|
||||
}
|
||||
|
|
|
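MultiMatchQueryBuilder used to rely on `containsKey(...)` followed by `lget()`, which read a value cached by the previous lookup. HPPC 0.7 drops that hidden state: `indexOf(key)` returns a slot index (negative when the key is absent) and `indexGet(slot)` reads it, so the map is still probed only once. A stripped-down sketch of the same pattern; the field names and boost values are made up:

    import com.carrotsearch.hppc.ObjectFloatHashMap;

    public class SlotLookupSketch {
        public static void main(String[] args) {
            ObjectFloatHashMap<String> boosts = new ObjectFloatHashMap<>();
            boosts.put("title", 3f);

            for (String field : new String[] { "title", "body" }) {
                final int slot = boosts.indexOf(field); // single hash lookup
                if (slot >= 0) {
                    field += "^" + boosts.indexGet(slot);
                }
                System.out.println(field); // prints "title^3.0" then "body"
            }
        }
    }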
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectFloatHashMap;
|
||||
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -80,7 +80,7 @@ public class QueryStringQueryBuilder extends BaseQueryBuilder implements Boostab
|
|||
|
||||
private List<String> fields;
|
||||
|
||||
private ObjectFloatOpenHashMap<String> fieldsBoosts;
|
||||
private ObjectFloatHashMap<String> fieldsBoosts;
|
||||
|
||||
private Boolean useDisMax;
|
||||
|
||||
|
@ -132,7 +132,7 @@ public class QueryStringQueryBuilder extends BaseQueryBuilder implements Boostab
|
|||
}
|
||||
fields.add(field);
|
||||
if (fieldsBoosts == null) {
|
||||
fieldsBoosts = new ObjectFloatOpenHashMap<>();
|
||||
fieldsBoosts = new ObjectFloatHashMap<>();
|
||||
}
|
||||
fieldsBoosts.put(field, boost);
|
||||
return this;
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectFloatHashMap;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import org.apache.lucene.queryparser.classic.MapperQueryParser;
|
||||
|
@ -110,7 +110,7 @@ public class QueryStringQueryParser implements QueryParser {
|
|||
qpSettings.fields().add(field);
|
||||
if (fBoost != -1) {
|
||||
if (qpSettings.boosts() == null) {
|
||||
qpSettings.boosts(new ObjectFloatOpenHashMap<String>());
|
||||
qpSettings.boosts(new ObjectFloatHashMap<String>());
|
||||
}
|
||||
qpSettings.boosts().put(field, fBoost);
|
||||
}
|
||||
|
@ -119,7 +119,7 @@ public class QueryStringQueryParser implements QueryParser {
|
|||
qpSettings.fields().add(fField);
|
||||
if (fBoost != -1) {
|
||||
if (qpSettings.boosts() == null) {
|
||||
qpSettings.boosts(new ObjectFloatOpenHashMap<String>());
|
||||
qpSettings.boosts(new ObjectFloatHashMap<String>());
|
||||
}
|
||||
qpSettings.boosts().put(fField, fBoost);
|
||||
}
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.indices.cache.query;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.carrotsearch.hppc.ObjectSet;
|
||||
import com.google.common.cache.Cache;
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
|
@ -386,8 +386,8 @@ public class IndicesQueryCache extends AbstractComponent implements RemovalListe
|
|||
|
||||
private class Reaper implements Runnable {
|
||||
|
||||
private final ObjectSet<CleanupKey> currentKeysToClean = ObjectOpenHashSet.newInstance();
|
||||
private final ObjectSet<IndexShard> currentFullClean = ObjectOpenHashSet.newInstance();
|
||||
private final ObjectSet<CleanupKey> currentKeysToClean = new ObjectHashSet<>();
|
||||
private final ObjectSet<IndexShard> currentFullClean = new ObjectHashSet<>();
|
||||
|
||||
private volatile boolean closed;
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.indices.cluster;
|
||||
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import com.carrotsearch.hppc.ObjectContainer;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.google.common.base.Predicate;
|
||||
|
@ -262,7 +262,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
|
|||
if (routingNode == null) {
|
||||
return;
|
||||
}
|
||||
IntOpenHashSet newShardIds = new IntOpenHashSet();
|
||||
IntHashSet newShardIds = new IntHashSet();
|
||||
for (IndexService indexService : indicesService) {
|
||||
String index = indexService.index().name();
|
||||
IndexMetaData indexMetaData = event.state().metaData().index(index);
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.percolator;
|
||||
|
||||
import com.carrotsearch.hppc.ByteObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.ByteObjectHashMap;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
|
@ -109,7 +109,7 @@ public class PercolatorService extends AbstractComponent {
|
|||
public final static String TYPE_NAME = ".percolator";
|
||||
|
||||
private final IndicesService indicesService;
|
||||
private final ByteObjectOpenHashMap<PercolatorType> percolatorTypes;
|
||||
private final ByteObjectHashMap<PercolatorType> percolatorTypes;
|
||||
private final PageCacheRecycler pageCacheRecycler;
|
||||
private final BigArrays bigArrays;
|
||||
private final ClusterService clusterService;
|
||||
|
@ -153,7 +153,7 @@ public class PercolatorService extends AbstractComponent {
|
|||
single = new SingleDocumentPercolatorIndex(cache);
|
||||
multi = new MultiDocumentPercolatorIndex(cache);
|
||||
|
||||
percolatorTypes = new ByteObjectOpenHashMap<>(6);
|
||||
percolatorTypes = new ByteObjectHashMap<>(6);
|
||||
percolatorTypes.put(countPercolator.id(), countPercolator);
|
||||
percolatorTypes.put(queryCountPercolator.id(), queryCountPercolator);
|
||||
percolatorTypes.put(matchPercolator.id(), matchPercolator);
|
||||
|
|
|
@ -19,7 +19,9 @@
|
|||
|
||||
package org.elasticsearch.rest.action.cat;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectIntHashMap;
|
||||
import com.carrotsearch.hppc.ObjectIntScatterMap;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
|
||||
|
@ -98,7 +100,7 @@ public class RestAllocationAction extends AbstractCatAction {
|
|||
}
|
||||
|
||||
private Table buildTable(RestRequest request, final ClusterStateResponse state, final NodesStatsResponse stats) {
|
||||
final ObjectIntOpenHashMap<String> allocs = new ObjectIntOpenHashMap<>();
|
||||
final ObjectIntScatterMap<String> allocs = new ObjectIntScatterMap<>();
|
||||
|
||||
for (ShardRouting shard : state.getState().routingTable().allShards()) {
|
||||
String nodeId = "UNASSIGNED";
|
||||
|
@ -115,10 +117,7 @@ public class RestAllocationAction extends AbstractCatAction {
|
|||
for (NodeStats nodeStats : stats.getNodes()) {
|
||||
DiscoveryNode node = nodeStats.getNode();
|
||||
|
||||
int shardCount = 0;
|
||||
if (allocs.containsKey(node.id())) {
|
||||
shardCount = allocs.lget();
|
||||
}
|
||||
int shardCount = allocs.getOrDefault(node.id(), 0);
|
||||
|
||||
ByteSizeValue total = nodeStats.getFs().getTotal().getTotal();
|
||||
ByteSizeValue avail = nodeStats.getFs().getTotal().getAvailable();
|
||||
|
@ -144,16 +143,17 @@ public class RestAllocationAction extends AbstractCatAction {
|
|||
table.endRow();
|
||||
}
|
||||
|
||||
if (allocs.containsKey("UNASSIGNED")) {
|
||||
final String UNASSIGNED = "UNASSIGNED";
|
||||
if (allocs.containsKey(UNASSIGNED)) {
|
||||
table.startRow();
|
||||
table.addCell(allocs.lget());
|
||||
table.addCell(allocs.get(UNASSIGNED));
|
||||
table.addCell(null);
|
||||
table.addCell(null);
|
||||
table.addCell(null);
|
||||
table.addCell(null);
|
||||
table.addCell(null);
|
||||
table.addCell(null);
|
||||
table.addCell("UNASSIGNED");
|
||||
table.addCell(UNASSIGNED);
|
||||
table.endRow();
|
||||
}
|
||||
|
||||
|
|
|
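Two independent simplifications show up in RestAllocationAction: the containsKey/lget dance collapses into `getOrDefault(key, 0)`, and the per-request shard counter becomes an `ObjectIntScatterMap`, the HPPC 0.7 variant aimed at short-lived, purely local lookup tables. A sketch of both, with invented node ids:

    import com.carrotsearch.hppc.ObjectIntScatterMap;

    public class AllocationCountSketch {
        public static void main(String[] args) {
            ObjectIntScatterMap<String> allocs = new ObjectIntScatterMap<>();
            allocs.addTo("node-1", 1);
            allocs.addTo("node-1", 1);

            // Missing keys no longer need an explicit containsKey() check.
            int node1 = allocs.getOrDefault("node-1", 0); // 2
            int node2 = allocs.getOrDefault("node-2", 0); // 0
            System.out.println(node1 + " " + node2);
        }
    }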
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.rest.action.cat;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectLongMap;
|
||||
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectLongHashMap;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
|
||||
|
@ -94,7 +94,7 @@ public class RestFielddataAction extends AbstractCatAction {
|
|||
|
||||
// Collect all the field names so a new table can be built
|
||||
for (NodeStats ns : nodeStatses.getNodes()) {
|
||||
ObjectLongOpenHashMap<String> fields = ns.getIndices().getFieldData().getFields();
|
||||
ObjectLongHashMap<String> fields = ns.getIndices().getFieldData().getFields();
|
||||
nodesFields.put(ns, fields);
|
||||
if (fields != null) {
|
||||
for (String key : fields.keys().toArray(String.class)) {
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.carrotsearch.hppc.ObjectSet;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.google.common.base.Charsets;
|
||||
|
@ -837,7 +837,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
|
|||
public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
|
||||
final Loading defaultLoading = Loading.parse(indexMetaData.settings().get(NORMS_LOADING_KEY), Loading.LAZY);
|
||||
final MapperService mapperService = indexShard.mapperService();
|
||||
final ObjectSet<String> warmUp = new ObjectOpenHashSet<>();
|
||||
final ObjectSet<String> warmUp = new ObjectHashSet<>();
|
||||
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
|
||||
for (FieldMapper<?> fieldMapper : docMapper.mappers()) {
|
||||
final String indexName = fieldMapper.names().indexName();
|
||||
|
|
|
@@ -18,7 +18,7 @@
*/
package org.elasticsearch.search.aggregations.bucket.nested;
import com.carrotsearch.hppc.LongIntOpenHashMap;
import com.carrotsearch.hppc.LongIntHashMap;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;

@@ -77,22 +77,24 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
} else {
parentDocs = docIdSet.bits();
}
final LongIntOpenHashMap bucketOrdToLastCollectedParentDoc = new LongIntOpenHashMap(32);
final LongIntHashMap bucketOrdToLastCollectedParentDoc = new LongIntHashMap(32);
return new LeafBucketCollectorBase(sub, null) {
@Override
public void collect(int childDoc, long bucket) throws IOException {
// fast forward to retrieve the parentDoc this childDoc belongs to
final int parentDoc = parentDocs.nextSetBit(childDoc);
assert childDoc <= parentDoc && parentDoc != DocIdSetIterator.NO_MORE_DOCS;
if (bucketOrdToLastCollectedParentDoc.containsKey(bucket)) {
int lastCollectedParentDoc = bucketOrdToLastCollectedParentDoc.lget();
int keySlot = bucketOrdToLastCollectedParentDoc.indexOf(bucket);
if (bucketOrdToLastCollectedParentDoc.indexExists(keySlot)) {
int lastCollectedParentDoc = bucketOrdToLastCollectedParentDoc.indexGet(keySlot);
if (parentDoc > lastCollectedParentDoc) {
collectBucket(sub, parentDoc, bucket);
bucketOrdToLastCollectedParentDoc.lset(parentDoc);
bucketOrdToLastCollectedParentDoc.indexReplace(keySlot, parentDoc);
}
} else {
collectBucket(sub, parentDoc, bucket);
bucketOrdToLastCollectedParentDoc.put(bucket, parentDoc);
bucketOrdToLastCollectedParentDoc.indexInsert(keySlot, bucket, parentDoc);
}
}
};
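This hunk exercises the whole slot-based replacement for lget/lset: one `indexOf(bucket)` call, then `indexExists`/`indexGet`/`indexReplace` when the key is present, and `indexInsert` with the same negative slot when it is not, so the hash is computed once per collected document. A compact sketch of the pattern outside the aggregator; the keys and values are arbitrary:

    import com.carrotsearch.hppc.LongIntHashMap;

    public class SlotUpdateSketch {
        // Keep, per bucket, the largest value seen so far, with one hash lookup per call.
        static void record(LongIntHashMap lastSeen, long bucket, int doc) {
            final int slot = lastSeen.indexOf(bucket);
            if (lastSeen.indexExists(slot)) {
                if (doc > lastSeen.indexGet(slot)) {
                    lastSeen.indexReplace(slot, doc);    // overwrite in place
                }
            } else {
                lastSeen.indexInsert(slot, bucket, doc); // reuse the negative slot
            }
        }

        public static void main(String[] args) {
            LongIntHashMap lastSeen = new LongIntHashMap(32);
            record(lastSeen, 7L, 10);
            record(lastSeen, 7L, 4);
            record(lastSeen, 7L, 12);
            System.out.println(lastSeen.get(7L)); // 12
        }
    }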
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.terms.support;
|
||||
|
||||
import com.carrotsearch.hppc.LongOpenHashSet;
|
||||
import com.carrotsearch.hppc.LongHashSet;
|
||||
import com.carrotsearch.hppc.LongSet;
|
||||
|
||||
import org.apache.lucene.index.RandomAccessOrds;
|
||||
|
@ -59,10 +59,10 @@ public class IncludeExclude {
|
|||
|
||||
private LongFilter(int numValids, int numInvalids) {
|
||||
if (numValids > 0) {
|
||||
valids = new LongOpenHashSet(numValids);
|
||||
valids = new LongHashSet(numValids);
|
||||
}
|
||||
if (numInvalids > 0) {
|
||||
invalids = new LongOpenHashSet(numInvalids);
|
||||
invalids = new LongHashSet(numInvalids);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations.metrics.cardinality;
import com.carrotsearch.hppc.hash.MurmurHash3;
import com.carrotsearch.hppc.BitMixer;
import com.google.common.base.Preconditions;
import org.apache.lucene.index.LeafReaderContext;

@@ -375,7 +375,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
@Override
public long valueAt(int index) {
return MurmurHash3.hash(values.valueAt(index));
return BitMixer.mix64(values.valueAt(index));
}
}

@@ -399,7 +399,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
@Override
public long valueAt(int index) {
return MurmurHash3.hash(java.lang.Double.doubleToLongBits(values.valueAt(index)));
return BitMixer.mix64(java.lang.Double.doubleToLongBits(values.valueAt(index)));
}
}
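`com.carrotsearch.hppc.hash.MurmurHash3` is gone in HPPC 0.7; the equivalent 64-bit bit-mixing step now lives in `BitMixer.mix64`, which is what the cardinality collectors switch to before feeding values into the sketch. A one-method sketch with an arbitrary input value:

    import com.carrotsearch.hppc.BitMixer;

    public class MixSketch {
        public static void main(String[] args) {
            long raw = Double.doubleToLongBits(3.14);
            // Same role as the old MurmurHash3.hash(long): spread the bits of the raw value.
            System.out.println(Long.toHexString(BitMixer.mix64(raw)));
        }
    }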
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.builder;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectFloatHashMap;
|
||||
import com.google.common.base.Charsets;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.Lists;
|
||||
|
@ -117,7 +117,7 @@ public class SearchSourceBuilder implements ToXContent {
|
|||
private List<RescoreBuilder> rescoreBuilders;
|
||||
private Integer defaultRescoreWindowSize;
|
||||
|
||||
private ObjectFloatOpenHashMap<String> indexBoost = null;
|
||||
private ObjectFloatHashMap<String> indexBoost = null;
|
||||
|
||||
private String[] stats;
|
||||
|
||||
|
@ -653,7 +653,7 @@ public class SearchSourceBuilder implements ToXContent {
|
|||
*/
|
||||
public SearchSourceBuilder indexBoost(String index, float indexBoost) {
|
||||
if (this.indexBoost == null) {
|
||||
this.indexBoost = new ObjectFloatOpenHashMap<>();
|
||||
this.indexBoost = new ObjectFloatHashMap<>();
|
||||
}
|
||||
this.indexBoost.put(index, indexBoost);
|
||||
return this;
|
||||
|
@ -809,11 +809,11 @@ public class SearchSourceBuilder implements ToXContent {
|
|||
|
||||
if (indexBoost != null) {
|
||||
builder.startObject("indices_boost");
|
||||
final boolean[] states = indexBoost.allocated;
|
||||
assert !indexBoost.containsKey(null);
|
||||
final Object[] keys = indexBoost.keys;
|
||||
final float[] values = indexBoost.values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (keys[i] != null) {
|
||||
builder.field((String) keys[i], values[i]);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.search.controller;
|
||||
|
||||
import com.carrotsearch.hppc.IntArrayList;
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import org.apache.lucene.index.Term;
|
||||
|
@ -102,8 +102,8 @@ public class SearchPhaseController extends AbstractComponent {
|
|||
}
|
||||
|
||||
public AggregatedDfs aggregateDfs(AtomicArray<DfsSearchResult> results) {
|
||||
ObjectObjectOpenHashMap<Term, TermStatistics> termStatistics = HppcMaps.newNoNullKeysMap();
|
||||
ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
|
||||
ObjectObjectHashMap<Term, TermStatistics> termStatistics = HppcMaps.newNoNullKeysMap();
|
||||
ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
|
||||
long aggMaxDoc = 0;
|
||||
for (AtomicArray.Entry<DfsSearchResult> lEntry : results.asList()) {
|
||||
final Term[] terms = lEntry.value.terms();
|
||||
|
@ -124,11 +124,12 @@ public class SearchPhaseController extends AbstractComponent {
|
|||
}
|
||||
|
||||
}
|
||||
final boolean[] states = lEntry.value.fieldStatistics().allocated;
|
||||
|
||||
assert !lEntry.value.fieldStatistics().containsKey(null);
|
||||
final Object[] keys = lEntry.value.fieldStatistics().keys;
|
||||
final Object[] values = lEntry.value.fieldStatistics().values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (keys[i] != null) {
|
||||
String key = (String) keys[i];
|
||||
CollectionStatistics value = (CollectionStatistics) values[i];
|
||||
assert key != null;
|
||||
|
|
|
@ -20,7 +20,9 @@
|
|||
package org.elasticsearch.search.dfs;
|
||||
|
||||
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.CollectionStatistics;
|
||||
import org.apache.lucene.search.TermStatistics;
|
||||
|
@ -33,24 +35,24 @@ import java.io.IOException;
|
|||
|
||||
public class AggregatedDfs implements Streamable {
|
||||
|
||||
private ObjectObjectOpenHashMap<Term, TermStatistics> termStatistics;
|
||||
private ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics;
|
||||
private ObjectObjectHashMap<Term, TermStatistics> termStatistics;
|
||||
private ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics;
|
||||
private long maxDoc;
|
||||
|
||||
private AggregatedDfs() {
|
||||
}
|
||||
|
||||
public AggregatedDfs(ObjectObjectOpenHashMap<Term, TermStatistics> termStatistics, ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics, long maxDoc) {
|
||||
public AggregatedDfs(ObjectObjectHashMap<Term, TermStatistics> termStatistics, ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics, long maxDoc) {
|
||||
this.termStatistics = termStatistics;
|
||||
this.fieldStatistics = fieldStatistics;
|
||||
this.maxDoc = maxDoc;
|
||||
}
|
||||
|
||||
public ObjectObjectOpenHashMap<Term, TermStatistics> termStatistics() {
|
||||
public ObjectObjectHashMap<Term, TermStatistics> termStatistics() {
|
||||
return termStatistics;
|
||||
}
|
||||
|
||||
public ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics() {
|
||||
public ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics() {
|
||||
return fieldStatistics;
|
||||
}
|
||||
|
||||
|
@ -82,20 +84,17 @@ public class AggregatedDfs implements Streamable {
|
|||
@Override
|
||||
public void writeTo(final StreamOutput out) throws IOException {
|
||||
out.writeVInt(termStatistics.size());
|
||||
final boolean[] states = termStatistics.allocated;
|
||||
final Object[] keys = termStatistics.keys;
|
||||
final Object[] values = termStatistics.values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
Term term = (Term) keys[i];
|
||||
|
||||
for (ObjectObjectCursor<Term, TermStatistics> c : termStatistics()) {
|
||||
Term term = (Term) c.key;
|
||||
out.writeString(term.field());
|
||||
out.writeBytesRef(term.bytes());
|
||||
TermStatistics stats = (TermStatistics) values[i];
|
||||
TermStatistics stats = (TermStatistics) c.value;
|
||||
out.writeBytesRef(stats.term());
|
||||
out.writeVLong(stats.docFreq());
|
||||
out.writeVLong(DfsSearchResult.addOne(stats.totalTermFreq()));
|
||||
}
|
||||
}
|
||||
|
||||
DfsSearchResult.writeFieldStats(out, fieldStatistics);
|
||||
out.writeVLong(maxDoc);
|
||||
}
|
||||
|
|
|
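For serialization, AggregatedDfs now iterates the term-statistics map through HPPC's cursor iterator instead of the removed `allocated`/`keys`/`values` walk; each `ObjectObjectCursor` exposes the key and value of one occupied slot. A sketch of the same traversal over a throwaway map:

    import com.carrotsearch.hppc.ObjectObjectHashMap;
    import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

    public class CursorIterationSketch {
        public static void main(String[] args) {
            ObjectObjectHashMap<String, Long> stats = new ObjectObjectHashMap<>();
            stats.put("body", 42L);
            stats.put("title", 7L);

            // The cursor instance is reused across iterations; copy key/value out if you keep them.
            for (ObjectObjectCursor<String, Long> c : stats) {
                System.out.println(c.key + " -> " + c.value);
            }
        }
    }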
@ -19,8 +19,8 @@
|
|||
|
||||
package org.elasticsearch.search.dfs;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import org.apache.lucene.index.IndexReaderContext;
|
||||
|
@ -55,7 +55,7 @@ public class DfsPhase implements SearchPhase {
|
|||
|
||||
@Override
|
||||
public void execute(SearchContext context) {
|
||||
final ObjectOpenHashSet<Term> termsSet = new ObjectOpenHashSet<>();
|
||||
final ObjectHashSet<Term> termsSet = new ObjectHashSet<>();
|
||||
try {
|
||||
if (!context.queryRewritten()) {
|
||||
context.updateRewriteQuery(context.searcher().rewrite(context.query()));
|
||||
|
@ -75,7 +75,7 @@ public class DfsPhase implements SearchPhase {
|
|||
termStatistics[i] = context.searcher().termStatistics(terms[i], termContext);
|
||||
}
|
||||
|
||||
ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
|
||||
ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
|
||||
for (Term term : terms) {
|
||||
assert term.field() != null : "field is null";
|
||||
if (!fieldStatistics.containsKey(term.field())) {
|
||||
|
@ -97,9 +97,9 @@ public class DfsPhase implements SearchPhase {
|
|||
// We need to bridge to JCF world, b/c of Query#extractTerms
|
||||
private static class DelegateSet extends AbstractSet<Term> {
|
||||
|
||||
private final ObjectOpenHashSet<Term> delegate;
|
||||
private final ObjectHashSet<Term> delegate;
|
||||
|
||||
private DelegateSet(ObjectOpenHashSet<Term> delegate) {
|
||||
private DelegateSet(ObjectHashSet<Term> delegate) {
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
|
|
|
@ -19,7 +19,9 @@
|
|||
|
||||
package org.elasticsearch.search.dfs;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.CollectionStatistics;
|
||||
import org.apache.lucene.search.TermStatistics;
|
||||
|
@ -45,7 +47,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
|
|||
private long id;
|
||||
private Term[] terms;
|
||||
private TermStatistics[] termStatistics;
|
||||
private ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
|
||||
private ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
|
||||
private int maxDoc;
|
||||
|
||||
public DfsSearchResult() {
|
||||
|
@ -87,7 +89,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
|
|||
return this;
|
||||
}
|
||||
|
||||
public DfsSearchResult fieldStatistics(ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics) {
|
||||
public DfsSearchResult fieldStatistics(ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics) {
|
||||
this.fieldStatistics = fieldStatistics;
|
||||
return this;
|
||||
}
|
||||
|
@ -100,7 +102,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
|
|||
return termStatistics;
|
||||
}
|
||||
|
||||
public ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics() {
|
||||
public ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics() {
|
||||
return fieldStatistics;
|
||||
}
|
||||
|
||||
|
@ -145,15 +147,12 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
|
|||
out.writeVInt(maxDoc);
|
||||
}
|
||||
|
||||
public static void writeFieldStats(StreamOutput out, ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics) throws IOException {
|
||||
public static void writeFieldStats(StreamOutput out, ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics) throws IOException {
|
||||
out.writeVInt(fieldStatistics.size());
|
||||
final boolean[] states = fieldStatistics.allocated;
|
||||
Object[] keys = fieldStatistics.keys;
|
||||
Object[] values = fieldStatistics.values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
out.writeString((String) keys[i]);
|
||||
CollectionStatistics statistics = (CollectionStatistics) values[i];
|
||||
|
||||
for (ObjectObjectCursor<String, CollectionStatistics> c : fieldStatistics) {
|
||||
out.writeString(c.key);
|
||||
CollectionStatistics statistics = c.value;
|
||||
assert statistics.maxDoc() >= 0;
|
||||
out.writeVLong(statistics.maxDoc());
|
||||
out.writeVLong(addOne(statistics.docCount()));
|
||||
|
@ -161,7 +160,6 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
|
|||
out.writeVLong(addOne(statistics.sumDocFreq()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void writeTermStats(StreamOutput out, TermStatistics[] termStatistics) throws IOException {
|
||||
out.writeVInt(termStatistics.length);
|
||||
|
@ -176,11 +174,11 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
|
|||
out.writeVLong(addOne(termStatistic.totalTermFreq()));
|
||||
}
|
||||
|
||||
public static ObjectObjectOpenHashMap<String, CollectionStatistics> readFieldStats(StreamInput in) throws IOException {
|
||||
public static ObjectObjectHashMap<String, CollectionStatistics> readFieldStats(StreamInput in) throws IOException {
|
||||
return readFieldStats(in, null);
|
||||
}
|
||||
|
||||
public static ObjectObjectOpenHashMap<String, CollectionStatistics> readFieldStats(StreamInput in, ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics) throws IOException {
|
||||
public static ObjectObjectHashMap<String, CollectionStatistics> readFieldStats(StreamInput in, ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics) throws IOException {
|
||||
final int numFieldStatistics = in.readVInt();
|
||||
if (fieldStatistics == null) {
|
||||
fieldStatistics = HppcMaps.newNoNullKeysMap(numFieldStatistics);
|
||||
|
|
|
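DfsSearchResult keeps building its field-statistics map through Elasticsearch's `HppcMaps.newNoNullKeysMap(...)` helper, which guards against null keys so that the raw keys[] walks and cursor loops elsewhere in this commit can treat a null slot as empty. A sketch of how such a map is used; the helper call appears in the diff, but the import path and the map contents below are assumptions for illustration:

    import com.carrotsearch.hppc.ObjectObjectHashMap;
    import org.elasticsearch.common.collect.HppcMaps; // assumed location of the helper

    public class NoNullKeysSketch {
        public static void main(String[] args) {
            // Pre-sized map that rejects null keys.
            ObjectObjectHashMap<String, Long> fieldStats = HppcMaps.newNoNullKeysMap(16);
            fieldStats.put("body", 123L);
            System.out.println(fieldStats.get("body"));
        }
    }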
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.internal;
|
||||
|
||||
import com.carrotsearch.hppc.IntObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.IntObjectHashMap;
|
||||
import com.google.common.collect.Iterators;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -51,7 +51,7 @@ public class InternalSearchHits implements SearchHits {
|
|||
}
|
||||
|
||||
private IdentityHashMap<SearchShardTarget, Integer> shardHandleLookup = new IdentityHashMap<>();
|
||||
private IntObjectOpenHashMap<SearchShardTarget> handleShardLookup = new IntObjectOpenHashMap<>();
|
||||
private IntObjectHashMap<SearchShardTarget> handleShardLookup = new IntObjectHashMap<>();
|
||||
private ShardTargetType streamShardTarget = ShardTargetType.STREAM;
|
||||
|
||||
public StreamContext reset() {
|
||||
|
@ -65,7 +65,7 @@ public class InternalSearchHits implements SearchHits {
|
|||
return shardHandleLookup;
|
||||
}
|
||||
|
||||
public IntObjectOpenHashMap<SearchShardTarget> handleShardLookup() {
|
||||
public IntObjectHashMap<SearchShardTarget> handleShardLookup() {
|
||||
return handleShardLookup;
|
||||
}
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.suggest.completion;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectLongHashMap;
|
||||
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.codecs.CodecUtil;
|
||||
|
@ -283,9 +283,9 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider
|
|||
@Override
|
||||
public CompletionStats stats(String... fields) {
|
||||
long sizeInBytes = 0;
|
||||
ObjectLongOpenHashMap<String> completionFields = null;
|
||||
ObjectLongHashMap<String> completionFields = null;
|
||||
if (fields != null && fields.length > 0) {
|
||||
completionFields = new ObjectLongOpenHashMap<>(fields.length);
|
||||
completionFields = new ObjectLongHashMap<>(fields.length);
|
||||
}
|
||||
|
||||
for (Map.Entry<String, AnalyzingSuggestHolder> entry : lookupMap.entrySet()) {
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.search.suggest.completion;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectLongHashMap;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -38,12 +38,12 @@ public class CompletionStats implements Streamable, ToXContent {
|
|||
private long sizeInBytes;
|
||||
|
||||
@Nullable
|
||||
private ObjectLongOpenHashMap<String> fields;
|
||||
private ObjectLongHashMap<String> fields;
|
||||
|
||||
public CompletionStats() {
|
||||
}
|
||||
|
||||
public CompletionStats(long size, @Nullable ObjectLongOpenHashMap<String> fields) {
|
||||
public CompletionStats(long size, @Nullable ObjectLongHashMap<String> fields) {
|
||||
this.sizeInBytes = size;
|
||||
this.fields = fields;
|
||||
}
|
||||
|
@ -56,7 +56,7 @@ public class CompletionStats implements Streamable, ToXContent {
|
|||
return new ByteSizeValue(sizeInBytes);
|
||||
}
|
||||
|
||||
public ObjectLongOpenHashMap<String> getFields() {
|
||||
public ObjectLongHashMap<String> getFields() {
|
||||
return fields;
|
||||
}
|
||||
|
||||
|
@ -65,7 +65,7 @@ public class CompletionStats implements Streamable, ToXContent {
|
|||
sizeInBytes = in.readVLong();
|
||||
if (in.readBoolean()) {
|
||||
int size = in.readVInt();
|
||||
fields = new ObjectLongOpenHashMap<>(size);
|
||||
fields = new ObjectLongHashMap<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
fields.put(in.readString(), in.readVLong());
|
||||
}
|
||||
|
@ -80,11 +80,12 @@ public class CompletionStats implements Streamable, ToXContent {
|
|||
} else {
|
||||
out.writeBoolean(true);
|
||||
out.writeVInt(fields.size());
|
||||
final boolean[] states = fields.allocated;
|
||||
|
||||
assert !fields.containsKey(null);
|
||||
final Object[] keys = fields.keys;
|
||||
final long[] values = fields.values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (keys[i] != null) {
|
||||
out.writeString((String) keys[i]);
|
||||
out.writeVLong(values[i]);
|
||||
}
|
||||
|
@ -98,11 +99,12 @@ public class CompletionStats implements Streamable, ToXContent {
|
|||
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, sizeInBytes);
|
||||
if (fields != null) {
|
||||
builder.startObject(Fields.FIELDS);
|
||||
final boolean[] states = fields.allocated;
|
||||
|
||||
assert !fields.containsKey(null);
|
||||
final Object[] keys = fields.keys;
|
||||
final long[] values = fields.values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (keys[i] != null) {
|
||||
builder.startObject((String) keys[i], XContentBuilder.FieldCaseConversion.NONE);
|
||||
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, values[i]);
|
||||
builder.endObject();
|
||||
|
@ -135,16 +137,18 @@ public class CompletionStats implements Streamable, ToXContent {
|
|||
sizeInBytes += completion.getSizeInBytes();
|
||||
|
||||
if (completion.fields != null) {
|
||||
if (fields == null) fields = new ObjectLongOpenHashMap<>();
|
||||
|
||||
final boolean[] states = completion.fields.allocated;
|
||||
if (fields == null) {
|
||||
fields = completion.fields.clone();
|
||||
} else {
|
||||
assert !completion.fields.containsKey(null);
|
||||
final Object[] keys = completion.fields.keys;
|
||||
final long[] values = completion.fields.values;
|
||||
for (int i = 0; i < states.length; i++) {
|
||||
if (states[i]) {
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (keys[i] != null) {
|
||||
fields.addTo((String) keys[i], values[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
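When merging per-shard completion stats, the code now clones the incoming map for the first shard and folds later shards in with `addTo`, again walking `keys`/`values` with the null-slot check. A minimal merge sketch with invented field names; it uses cursor iteration for brevity rather than the raw arrays shown above:

    import com.carrotsearch.hppc.ObjectLongHashMap;
    import com.carrotsearch.hppc.cursors.ObjectLongCursor;

    public class StatsMergeSketch {
        static ObjectLongHashMap<String> merge(ObjectLongHashMap<String> into,
                                               ObjectLongHashMap<String> from) {
            if (into == null) {
                return from.clone();            // first shard: take a copy
            }
            for (ObjectLongCursor<String> c : from) {
                into.addTo(c.key, c.value);     // later shards: accumulate per field
            }
            return into;
        }

        public static void main(String[] args) {
            ObjectLongHashMap<String> a = new ObjectLongHashMap<>();
            a.put("suggest", 100L);
            ObjectLongHashMap<String> b = new ObjectLongHashMap<>();
            b.put("suggest", 50L);
            System.out.println(merge(merge(null, a), b).get("suggest")); // 150
        }
    }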
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.suggest.context;
|
||||
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.apache.lucene.analysis.PrefixAnalyzer.PrefixTokenFilter;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
|
@ -368,7 +368,7 @@ public class GeolocationContextMapping extends ContextMapping {
|
|||
}
|
||||
} else if (FIELD_PRECISION.equals(fieldName)) {
|
||||
if(parser.nextToken() == Token.START_ARRAY) {
|
||||
IntOpenHashSet precisions = new IntOpenHashSet();
|
||||
IntHashSet precisions = new IntHashSet();
|
||||
while(parser.nextToken() != Token.END_ARRAY) {
|
||||
precisions.add(parsePrecision(parser));
|
||||
}
|
||||
|
@ -448,7 +448,7 @@ public class GeolocationContextMapping extends ContextMapping {
|
|||
|
||||
public static class Builder extends ContextBuilder<GeolocationContextMapping> {
|
||||
|
||||
private IntOpenHashSet precisions = new IntOpenHashSet();
|
||||
private IntHashSet precisions = new IntHashSet();
|
||||
private boolean neighbors; // take neighbor cell on the lowest level into account
|
||||
private HashSet<String> defaultLocations = new HashSet<>();
|
||||
private String fieldName = null;
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.snapshots;
|
||||
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import com.carrotsearch.hppc.IntSet;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
@ -185,7 +185,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
|
|||
snapshotIndexMetaData = updateIndexSettings(snapshotIndexMetaData, request.indexSettings, request.ignoreIndexSettings);
|
||||
// Check that the index is closed or doesn't exist
|
||||
IndexMetaData currentIndexMetaData = currentState.metaData().index(renamedIndex);
|
||||
IntSet ignoreShards = new IntOpenHashSet();
|
||||
IntSet ignoreShards = new IntHashSet();
|
||||
if (currentIndexMetaData == null) {
|
||||
// Index doesn't exist - create it and start recovery
|
||||
// Make sure that the index we are about to create has a valid name
|
||||
|
|
|
@@ -19,7 +19,6 @@
package org.elasticsearch.transport;
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
import org.elasticsearch.common.ContextHolder;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.action.termvectors;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectIntHashMap;
|
||||
|
||||
import org.apache.lucene.analysis.payloads.PayloadHelper;
|
||||
import org.apache.lucene.document.FieldType;
|
||||
|
@ -493,7 +493,7 @@ public class GetTermVectorsTests extends AbstractTermVectorsTests {
|
|||
|
||||
private String createString(String[] tokens, Map<String, List<BytesRef>> payloads, int encoding, char delimiter) {
|
||||
String resultString = "";
|
||||
ObjectIntOpenHashMap<String> payloadCounter = new ObjectIntOpenHashMap<>();
|
||||
ObjectIntHashMap<String> payloadCounter = new ObjectIntHashMap<>();
|
||||
for (String token : tokens) {
|
||||
if (!payloadCounter.containsKey(token)) {
|
||||
payloadCounter.putIfAbsent(token, 0);
|
||||
|
|
|
@@ -601,12 +601,12 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
assertThat(getResponse.getAliases().size(), equalTo(1));
assertThat(getResponse.getAliases().get("foobar").size(), equalTo(2));
assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias2"));
assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(1), notNullValue());
assertThat(getResponse.getAliases().get("foobar").get(1).alias(), equalTo("alias1"));
assertThat(getResponse.getAliases().get("foobar").get(1).alias(), equalTo("alias2"));
assertThat(getResponse.getAliases().get("foobar").get(1).getFilter(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(1).getIndexRouting(), nullValue());
assertThat(getResponse.getAliases().get("foobar").get(1).getSearchRouting(), nullValue());
@ -19,10 +19,10 @@
|
|||
|
||||
package org.elasticsearch.benchmark.hppc;
|
||||
|
||||
import com.carrotsearch.hppc.IntIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.IntObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.IntIntHashMap;
|
||||
import com.carrotsearch.hppc.IntObjectHashMap;
|
||||
import com.carrotsearch.hppc.ObjectIntHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
|
||||
import org.elasticsearch.common.StopWatch;
|
||||
import org.elasticsearch.common.unit.SizeValue;
|
||||
|
@ -31,6 +31,12 @@ import java.util.HashMap;
|
|||
import java.util.IdentityHashMap;
|
||||
import java.util.concurrent.ThreadLocalRandom;
|
||||
|
||||
// TODO: these benchmarks aren't too good and may be easily skewed by jit doing
|
||||
// escape analysis/ side-effects/ local
|
||||
// optimisations. Proper benchmarks with JMH (bulk ops, single-shot mode)
|
||||
// should be better here.
|
||||
// https://github.com/carrotsearch/hppc/blob/master/hppc-benchmarks/src/main/java/com/carrotsearch/hppc/benchmarks/B003_HashSet_Contains.java
|
||||
|
||||
public class StringMapAdjustOrPutBenchmark {
|
||||
|
||||
public static void main(String[] args) {
|
||||
|
@ -50,12 +56,12 @@ public class StringMapAdjustOrPutBenchmark {
|
|||
StopWatch stopWatch;
|
||||
|
||||
stopWatch = new StopWatch().start();
|
||||
ObjectIntOpenHashMap<String> map = new ObjectIntOpenHashMap<>();
|
||||
ObjectIntHashMap<String> map = new ObjectIntHashMap<>();
|
||||
for (long iter = 0; iter < ITERATIONS; iter++) {
|
||||
if (REUSE) {
|
||||
map.clear();
|
||||
} else {
|
||||
map = new ObjectIntOpenHashMap<>();
|
||||
map = new ObjectIntHashMap<>();
|
||||
}
|
||||
for (long i = 0; i < PUT_OPERATIONS; i++) {
|
||||
map.addTo(values[(int) (i % NUMBER_OF_KEYS)], 1);
|
||||
|
@ -69,12 +75,12 @@ public class StringMapAdjustOrPutBenchmark {
|
|||
|
||||
stopWatch = new StopWatch().start();
|
||||
// TObjectIntCustomHashMap<String> iMap = new TObjectIntCustomHashMap<String>(new StringIdentityHashingStrategy());
|
||||
ObjectIntOpenHashMap<String> iMap = new ObjectIntOpenHashMap<>();
|
||||
ObjectIntHashMap<String> iMap = new ObjectIntHashMap<>();
|
||||
for (long iter = 0; iter < ITERATIONS; iter++) {
|
||||
if (REUSE) {
|
||||
iMap.clear();
|
||||
} else {
|
||||
iMap = new ObjectIntOpenHashMap<>();
|
||||
iMap = new ObjectIntHashMap<>();
|
||||
}
|
||||
for (long i = 0; i < PUT_OPERATIONS; i++) {
|
||||
iMap.addTo(values[(int) (i % NUMBER_OF_KEYS)], 1);
|
||||
|
@ -86,12 +92,12 @@ public class StringMapAdjustOrPutBenchmark {
|
|||
iMap = null;
|
||||
|
||||
stopWatch = new StopWatch().start();
|
||||
iMap = new ObjectIntOpenHashMap<>();
|
||||
iMap = new ObjectIntHashMap<>();
|
||||
for (long iter = 0; iter < ITERATIONS; iter++) {
|
||||
if (REUSE) {
|
||||
iMap.clear();
|
||||
} else {
|
||||
iMap = new ObjectIntOpenHashMap<>();
|
||||
iMap = new ObjectIntHashMap<>();
|
||||
}
|
||||
for (long i = 0; i < PUT_OPERATIONS; i++) {
|
||||
iMap.addTo(values[(int) (i % NUMBER_OF_KEYS)], 1);
|
||||
|
@ -104,12 +110,12 @@ public class StringMapAdjustOrPutBenchmark {
|
|||
|
||||
// now test with THashMap
|
||||
stopWatch = new StopWatch().start();
|
||||
ObjectObjectOpenHashMap<String, StringEntry> tMap = new ObjectObjectOpenHashMap<>();
|
||||
ObjectObjectHashMap<String, StringEntry> tMap = new ObjectObjectHashMap<>();
|
||||
for (long iter = 0; iter < ITERATIONS; iter++) {
|
||||
if (REUSE) {
|
||||
tMap.clear();
|
||||
} else {
|
||||
tMap = new ObjectObjectOpenHashMap<>();
|
||||
tMap = new ObjectObjectHashMap<>();
|
||||
}
|
||||
for (long i = 0; i < PUT_OPERATIONS; i++) {
|
||||
String key = values[(int) (i % NUMBER_OF_KEYS)];
|
||||
|
@ -187,12 +193,12 @@ public class StringMapAdjustOrPutBenchmark {
|
|||
}
|
||||
|
||||
stopWatch = new StopWatch().start();
|
||||
IntIntOpenHashMap intMap = new IntIntOpenHashMap();
|
||||
IntIntHashMap intMap = new IntIntHashMap();
|
||||
for (long iter = 0; iter < ITERATIONS; iter++) {
|
||||
if (REUSE) {
|
||||
intMap.clear();
|
||||
} else {
|
||||
intMap = new IntIntOpenHashMap();
|
||||
intMap = new IntIntHashMap();
|
||||
}
|
||||
for (long i = 0; i < PUT_OPERATIONS; i++) {
|
||||
int key = iValues[(int) (i % NUMBER_OF_KEYS)];
|
||||
|
@ -207,12 +213,12 @@ public class StringMapAdjustOrPutBenchmark {
|
|||
|
||||
// now test with THashMap
|
||||
stopWatch = new StopWatch().start();
|
||||
IntObjectOpenHashMap<IntEntry> tIntMap = new IntObjectOpenHashMap<>();
|
||||
IntObjectHashMap<IntEntry> tIntMap = new IntObjectHashMap<>();
|
||||
for (long iter = 0; iter < ITERATIONS; iter++) {
|
||||
if (REUSE) {
|
||||
tIntMap.clear();
|
||||
} else {
|
||||
tIntMap = new IntObjectOpenHashMap<>();
|
||||
tIntMap = new IntObjectHashMap<>();
|
||||
}
|
||||
for (long i = 0; i < PUT_OPERATIONS; i++) {
|
||||
int key = iValues[(int) (i % NUMBER_OF_KEYS)];
|
||||
|
|
|
@ -18,8 +18,8 @@
|
|||
*/
|
||||
package org.elasticsearch.benchmark.search.aggregations;
|
||||
|
||||
import com.carrotsearch.hppc.IntIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectOpenHashSet;
|
||||
import com.carrotsearch.hppc.IntIntHashMap;
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
|
||||
|
@ -111,7 +111,7 @@ public class GlobalOrdinalsBenchmark {
|
|||
.endArray()
|
||||
.endObject().endObject())
|
||||
.get();
|
||||
ObjectOpenHashSet<String> uniqueTerms = ObjectOpenHashSet.newInstance();
|
||||
ObjectHashSet<String> uniqueTerms = new ObjectHashSet<>();
|
||||
for (int i = 0; i < FIELD_LIMIT; i++) {
|
||||
boolean added;
|
||||
do {
|
||||
|
@ -122,16 +122,11 @@ public class GlobalOrdinalsBenchmark {
|
|||
uniqueTerms = null;
|
||||
|
||||
BulkRequestBuilder builder = client.prepareBulk();
|
||||
IntIntOpenHashMap tracker = new IntIntOpenHashMap();
|
||||
IntIntHashMap tracker = new IntIntHashMap();
|
||||
for (int i = 0; i < COUNT; i++) {
|
||||
Map<String, Object> fieldValues = new HashMap<>();
|
||||
for (int fieldSuffix = 1; fieldSuffix <= FIELD_LIMIT; fieldSuffix <<= 1) {
|
||||
int index;
|
||||
if (tracker.containsKey(fieldSuffix)) {
|
||||
index = tracker.lget();
|
||||
} else {
|
||||
tracker.put(fieldSuffix, index = 0);
|
||||
}
|
||||
int index = tracker.putOrAdd(fieldSuffix, 0, 0);
|
||||
if (index >= fieldSuffix) {
|
||||
index = random.nextInt(fieldSuffix);
|
||||
fieldValues.put("field_" + fieldSuffix, sValues[index]);
|
||||
|
|
|
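The benchmark's containsKey/lget/put juggling collapses into a single `putOrAdd(key, putValue, incrementValue)`, which stores `putValue` for a new key, adds `incrementValue` for an existing one, and returns the resulting value. A tiny sketch:

    import com.carrotsearch.hppc.IntIntHashMap;

    public class PutOrAddSketch {
        public static void main(String[] args) {
            IntIntHashMap tracker = new IntIntHashMap();
            System.out.println(tracker.putOrAdd(8, 0, 0)); // first time: 0 is stored and returned
            System.out.println(tracker.putOrAdd(8, 0, 1)); // afterwards: incremented to 1
        }
    }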
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.benchmark.search.aggregations;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectScatterSet;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
|
@ -137,7 +137,7 @@ public class SubAggregationSearchCollectModeBenchmark {
|
|||
for (int i = 0; i < NUMBER_OF_TERMS; i++) {
|
||||
lValues[i] = ThreadLocalRandom.current().nextLong();
|
||||
}
|
||||
ObjectOpenHashSet<String> uniqueTerms = ObjectOpenHashSet.newInstance();
|
||||
ObjectScatterSet<String> uniqueTerms = new ObjectScatterSet<>();
|
||||
for (int i = 0; i < NUMBER_OF_TERMS; i++) {
|
||||
boolean added;
|
||||
do {
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.benchmark.search.aggregations;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectScatterSet;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
|
||||
|
@ -96,7 +96,7 @@ public class TermsAggregationSearchAndIndexingBenchmark {
|
|||
Thread.sleep(5000);
|
||||
|
||||
long startTime = System.currentTimeMillis();
|
||||
ObjectOpenHashSet<String> uniqueTerms = ObjectOpenHashSet.newInstance();
|
||||
ObjectScatterSet<String> uniqueTerms = new ObjectScatterSet<>();
|
||||
for (int i = 0; i < NUMBER_OF_TERMS; i++) {
|
||||
boolean added;
|
||||
do {
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.benchmark.search.aggregations;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectScatterSet;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
|
@ -28,7 +28,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder;
|
|||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.search.SearchRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.client.Requests;
|
||||
import org.elasticsearch.common.StopWatch;
|
||||
|
@ -161,7 +160,7 @@ public class TermsAggregationSearchBenchmark {
|
|||
.endObject()
|
||||
.endObject())).actionGet();
|
||||
|
||||
ObjectOpenHashSet<String> uniqueTerms = ObjectOpenHashSet.newInstance();
|
||||
ObjectScatterSet<String> uniqueTerms = new ObjectScatterSet<>();
|
||||
for (int i = 0; i < NUMBER_OF_TERMS; i++) {
|
||||
boolean added;
|
||||
do {
|
||||
|
|
|
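The benchmarks that only need a throwaway uniqueness filter switch from `ObjectOpenHashSet.newInstance()` to `new ObjectScatterSet<>()`, the HPPC 0.7 variant intended for short-lived, local sets whose elements are not copied into other hash containers. A sketch of the de-duplication loop, with term generation stubbed out:

    import com.carrotsearch.hppc.ObjectScatterSet;
    import java.util.UUID;

    public class UniqueTermsSketch {
        public static void main(String[] args) {
            ObjectScatterSet<String> uniqueTerms = new ObjectScatterSet<>();
            while (uniqueTerms.size() < 5) {
                // add() returns true only when the element was not present yet.
                uniqueTerms.add(UUID.randomUUID().toString().substring(0, 4));
            }
            System.out.println(uniqueTerms.size());
        }
    }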
@@ -20,7 +20,7 @@
package org.elasticsearch.benchmark.search.child;
import com.carrotsearch.hppc.ObjectArrayList;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;

@@ -49,8 +49,8 @@ public class ParentChildIndexGenerator {
public void index() {
// Memory intensive...
ObjectOpenHashSet<String> usedParentIds = ObjectOpenHashSet.newInstanceWithCapacity(numParents, 0.5f);
ObjectArrayList<ParentDocument> parents = ObjectArrayList.newInstanceWithCapacity(numParents);
ObjectHashSet<String> usedParentIds = new ObjectHashSet<>(numParents, 0.5d);
ObjectArrayList<ParentDocument> parents = new ObjectArrayList<>(numParents);
for (int i = 0; i < numParents; i++) {
String parentId;
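The static factories (`newInstance`, `newInstanceWithCapacity`) are gone in 0.7; plain constructors now take the expected element count and, optionally, a load factor, which is a `double` (hence `0.5f` becoming `0.5d` above). A short sketch with placeholder element values:

    import com.carrotsearch.hppc.ObjectArrayList;
    import com.carrotsearch.hppc.ObjectHashSet;

    public class ConstructorSketch {
        public static void main(String[] args) {
            int numParents = 1_000;
            // Capacity hints go straight to the constructors now.
            ObjectHashSet<String> usedParentIds = new ObjectHashSet<>(numParents, 0.5d);
            ObjectArrayList<String> parents = new ObjectArrayList<>(numParents);
            usedParentIds.add("p-1");
            parents.add("p-1");
            System.out.println(usedParentIds.size() + " " + parents.size());
        }
    }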
@ -19,9 +19,8 @@
|
|||
|
||||
package org.elasticsearch.cluster.allocation;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectIntHashMap;
|
||||
import com.google.common.base.Predicate;
|
||||
import org.apache.lucene.util.LuceneTestCase.Slow;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.routing.IndexRoutingTable;
|
||||
|
@ -32,7 +31,6 @@ import org.elasticsearch.common.logging.ESLogger;
|
|||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.settings.ImmutableSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.discovery.zen.ZenDiscovery;
|
||||
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
|
||||
import org.elasticsearch.test.ElasticsearchIntegrationTest;
|
||||
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
|
||||
|
@ -95,7 +93,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
|
|||
logger.info("--> checking current state");
|
||||
ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
|
||||
// verify that we have all the primaries on node3
|
||||
ObjectIntOpenHashMap<String> counts = new ObjectIntOpenHashMap<>();
|
||||
ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();
|
||||
for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
|
||||
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
|
||||
for (ShardRouting shardRouting : indexShardRoutingTable) {
|
||||
|
@ -133,7 +131,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
|
|||
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("4").setWaitForRelocatingShards(0).execute().actionGet();
|
||||
assertThat(health.isTimedOut(), equalTo(false));
|
||||
ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
|
||||
ObjectIntOpenHashMap<String> counts = new ObjectIntOpenHashMap<>();
|
||||
ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();
|
||||
|
||||
for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
|
||||
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
|
||||
|
@ -169,7 +167,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
|
|||
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").setWaitForRelocatingShards(0).execute().actionGet();
|
||||
assertThat(health.isTimedOut(), equalTo(false));
|
||||
ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
|
||||
ObjectIntOpenHashMap<String> counts = new ObjectIntOpenHashMap<>();
|
||||
ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();
|
||||
|
||||
for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
|
||||
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
|
||||
|
@ -191,7 +189,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(health.isTimedOut(), equalTo(false));
|
||||
clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
|
||||
|
||||
counts = new ObjectIntOpenHashMap<>();
|
||||
counts = new ObjectIntHashMap<>();
|
||||
|
||||
for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
|
||||
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
|
||||
|
@ -213,7 +211,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(health.isTimedOut(), equalTo(false));
|
||||
clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
|
||||
|
||||
counts = new ObjectIntOpenHashMap<>();
|
||||
counts = new ObjectIntHashMap<>();
|
||||
|
||||
for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
|
||||
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
|
||||
|
@ -234,7 +232,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
assertThat(health.isTimedOut(), equalTo(false));
clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();

counts = new ObjectIntOpenHashMap<>();
counts = new ObjectIntHashMap<>();

for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
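The awareness-allocation hunks tally shards per node in an ObjectIntHashMap. A hedged sketch of that counting idiom, assuming HPPC 0.7.x's addTo helper and cursor iteration; names and data here are illustrative, not from this commit:

import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.cursors.ObjectIntCursor;

public class NodeShardCountSketch {
    public static void main(String[] args) {
        ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();
        // addTo(key, 1) increments the stored value, treating a missing key as 0.
        for (String node : new String[]{"node1", "node2", "node1", "node3", "node1"}) {
            counts.addTo(node, 1);
        }
        // Cursor iteration replaces any direct access to the map's internal arrays.
        for (ObjectIntCursor<String> c : counts) {
            System.out.println(c.key + " -> " + c.value);
        }
    }
}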
@ -30,6 +30,7 @@ import org.elasticsearch.indices.IndexMissingException;
|
|||
import org.elasticsearch.test.ElasticsearchTestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
|
||||
import static com.google.common.collect.Sets.newHashSet;
|
||||
|
@ -67,9 +68,8 @@ public class MetaDataTests extends ElasticsearchTestCase {
|
|||
assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar"));
|
||||
|
||||
results = md.concreteIndices(options, "foofoobar");
|
||||
assertEquals(2, results.length);
|
||||
assertEquals("foo", results[0]);
|
||||
assertEquals("foobar", results[1]);
|
||||
assertEquals(new HashSet<>(Arrays.asList("foo", "foobar")),
|
||||
new HashSet<>(Arrays.asList(results)));
|
||||
|
||||
try {
|
||||
md.concreteIndices(options, "bar");
|
||||
|
@ -151,8 +151,8 @@ public class MetaDataTests extends ElasticsearchTestCase {
|
|||
|
||||
results = md.concreteIndices(options, "foofoobar");
|
||||
assertEquals(2, results.length);
|
||||
assertEquals("foo", results[0]);
|
||||
assertEquals("foobar", results[1]);
|
||||
assertEquals(new HashSet<>(Arrays.asList("foo", "foobar")),
|
||||
new HashSet<>(Arrays.asList(results)));
|
||||
|
||||
results = md.concreteIndices(options, "foo", "bar");
|
||||
assertEquals(1, results.length);
|
||||
|
|
|
@ -59,6 +59,7 @@ public class ClusterSerializationTests extends ElasticsearchAllocationTestCase {
ClusterState serializedClusterState = ClusterState.Builder.fromBytes(ClusterState.Builder.toBytes(clusterState), newNode("node1"));

assertThat(serializedClusterState.getClusterName().value(), equalTo(clusterState.getClusterName().value()));

assertThat(serializedClusterState.routingTable().prettyPrint(), equalTo(clusterState.routingTable().prettyPrint()));
}
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.common.hppc;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import org.elasticsearch.common.collect.HppcMaps;
|
||||
import org.elasticsearch.test.ElasticsearchTestCase;
|
||||
import org.junit.Test;
|
||||
|
@ -35,48 +35,48 @@ public class HppcMapsTests extends ElasticsearchTestCase {
|
|||
boolean enabled = false;
|
||||
assert enabled = true;
|
||||
assumeTrue("assertions enabled", enabled);
|
||||
ObjectOpenHashSet<String> set1 = ObjectOpenHashSet.from("1", "2", "3");
|
||||
ObjectOpenHashSet<String> set2 = ObjectOpenHashSet.from("1", "2", "3");
|
||||
ObjectHashSet<String> set1 = ObjectHashSet.from("1", "2", "3");
|
||||
ObjectHashSet<String> set2 = ObjectHashSet.from("1", "2", "3");
|
||||
List<String> values = toList(HppcMaps.intersection(set1, set2));
|
||||
assertThat(values.size(), equalTo(3));
|
||||
assertThat(values.contains("1"), equalTo(true));
|
||||
assertThat(values.contains("2"), equalTo(true));
|
||||
assertThat(values.contains("3"), equalTo(true));
|
||||
|
||||
set1 = ObjectOpenHashSet.from("1", "2", "3");
|
||||
set2 = ObjectOpenHashSet.from("3", "4", "5");
|
||||
set1 = ObjectHashSet.from("1", "2", "3");
|
||||
set2 = ObjectHashSet.from("3", "4", "5");
|
||||
values = toList(HppcMaps.intersection(set1, set2));
|
||||
assertThat(values.size(), equalTo(1));
|
||||
assertThat(values.get(0), equalTo("3"));
|
||||
|
||||
set1 = ObjectOpenHashSet.from("1", "2", "3");
|
||||
set2 = ObjectOpenHashSet.from("4", "5", "6");
|
||||
set1 = ObjectHashSet.from("1", "2", "3");
|
||||
set2 = ObjectHashSet.from("4", "5", "6");
|
||||
values = toList(HppcMaps.intersection(set1, set2));
|
||||
assertThat(values.size(), equalTo(0));
|
||||
|
||||
set1 = ObjectOpenHashSet.from();
|
||||
set2 = ObjectOpenHashSet.from("3", "4", "5");
|
||||
set1 = ObjectHashSet.from();
|
||||
set2 = ObjectHashSet.from("3", "4", "5");
|
||||
values = toList(HppcMaps.intersection(set1, set2));
|
||||
assertThat(values.size(), equalTo(0));
|
||||
|
||||
set1 = ObjectOpenHashSet.from("1", "2", "3");
|
||||
set2 = ObjectOpenHashSet.from();
|
||||
set1 = ObjectHashSet.from("1", "2", "3");
|
||||
set2 = ObjectHashSet.from();
|
||||
values = toList(HppcMaps.intersection(set1, set2));
|
||||
assertThat(values.size(), equalTo(0));
|
||||
|
||||
set1 = ObjectOpenHashSet.from();
|
||||
set2 = ObjectOpenHashSet.from();
|
||||
set1 = ObjectHashSet.from();
|
||||
set2 = ObjectHashSet.from();
|
||||
values = toList(HppcMaps.intersection(set1, set2));
|
||||
assertThat(values.size(), equalTo(0));
|
||||
|
||||
set1 = null;
|
||||
set2 = ObjectOpenHashSet.from();
|
||||
set2 = ObjectHashSet.from();
|
||||
try {
|
||||
toList(HppcMaps.intersection(set1, set2));
|
||||
fail();
|
||||
} catch (AssertionError e) {}
|
||||
|
||||
set1 = ObjectOpenHashSet.from();
|
||||
set1 = ObjectHashSet.from();
|
||||
set2 = null;
|
||||
try {
|
||||
toList(HppcMaps.intersection(set1, set2));
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.common.util;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectLongMap;
|
||||
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectLongHashMap;
|
||||
import com.carrotsearch.hppc.cursors.ObjectLongCursor;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
|
@ -56,7 +56,7 @@ public class BytesRefHashTests extends ElasticsearchSingleNodeTest {
|
|||
for (int i = 0; i < values.length; ++i) {
|
||||
values[i] = new BytesRef(randomAsciiOfLength(5));
|
||||
}
|
||||
final ObjectLongMap<BytesRef> valueToId = new ObjectLongOpenHashMap<>();
|
||||
final ObjectLongMap<BytesRef> valueToId = new ObjectLongHashMap<>();
|
||||
final BytesRef[] idToValue = new BytesRef[values.length];
|
||||
final int iters = randomInt(1000000);
|
||||
for (int i = 0; i < iters; ++i) {
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
|
||||
package org.elasticsearch.common.util;
|
||||
|
||||
import com.carrotsearch.hppc.LongLongHashMap;
|
||||
import com.carrotsearch.hppc.LongLongMap;
|
||||
import com.carrotsearch.hppc.LongLongOpenHashMap;
|
||||
import com.carrotsearch.hppc.cursors.LongLongCursor;
|
||||
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
|
||||
import org.junit.Test;
|
||||
|
@ -52,7 +52,7 @@ public class LongHashTests extends ElasticsearchSingleNodeTest {
|
|||
for (int i = 0; i < values.length; ++i) {
|
||||
values[i] = randomLong();
|
||||
}
|
||||
final LongLongMap valueToId = new LongLongOpenHashMap();
|
||||
final LongLongMap valueToId = new LongLongHashMap();
|
||||
final long[] idToValue = new long[values.length];
|
||||
final int iters = randomInt(1000000);
|
||||
for (int i = 0; i < iters; ++i) {
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.common.util;
|
||||
|
||||
import com.carrotsearch.hppc.LongObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.LongObjectHashMap;
|
||||
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -27,7 +27,7 @@ public class LongObjectHashMapTests extends ElasticsearchSingleNodeTest {
|
|||
|
||||
@Test
|
||||
public void duel() {
|
||||
final LongObjectOpenHashMap<Object> map1 = new LongObjectOpenHashMap<>();
|
||||
final LongObjectHashMap<Object> map1 = new LongObjectHashMap<>();
|
||||
final LongObjectPagedHashMap<Object> map2 = new LongObjectPagedHashMap<>(randomInt(42), 0.6f + randomFloat() * 0.39f, BigArraysTests.randombigArrays());
|
||||
final int maxKey = randomIntBetween(1, 10000);
|
||||
final int iters = scaledRandomIntBetween(10000, 100000);
|
||||
|
@ -48,7 +48,7 @@ public class LongObjectHashMapTests extends ElasticsearchSingleNodeTest {
|
|||
for (int i = 0; i <= maxKey; ++i) {
|
||||
assertSame(map1.get(i), map2.get(i));
|
||||
}
|
||||
final LongObjectOpenHashMap<Object> copy = new LongObjectOpenHashMap<>();
|
||||
final LongObjectHashMap<Object> copy = new LongObjectHashMap<>();
|
||||
for (LongObjectPagedHashMap.Cursor<Object> cursor : map2) {
|
||||
copy.put(cursor.key, cursor.value);
|
||||
}
|
||||
|
|
|
@ -19,8 +19,11 @@
|
|||
|
||||
package org.elasticsearch.index.fielddata;
|
||||
|
||||
import com.carrotsearch.hppc.DoubleOpenHashSet;
|
||||
import com.carrotsearch.hppc.LongOpenHashSet;
|
||||
import com.carrotsearch.hppc.DoubleHashSet;
|
||||
import com.carrotsearch.hppc.LongHashSet;
|
||||
import com.carrotsearch.hppc.cursors.DoubleCursor;
|
||||
import com.carrotsearch.hppc.cursors.LongCursor;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.LongField;
|
||||
|
@ -324,23 +327,18 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
|
|||
public abstract long nextValue(Random r);
|
||||
}
|
||||
|
||||
private void test(List<LongOpenHashSet> values) throws Exception {
|
||||
private void test(List<LongHashSet> values) throws Exception {
|
||||
StringField id = new StringField("_id", "", Field.Store.NO);
|
||||
|
||||
for (int i = 0; i < values.size(); ++i) {
|
||||
Document doc = new Document();
|
||||
id.setStringValue("" + i);
|
||||
doc.add(id);
|
||||
final LongOpenHashSet v = values.get(i);
|
||||
final boolean[] states = v.allocated;
|
||||
final long[] keys = v.keys;
|
||||
|
||||
for (int j = 0; j < states.length; j++) {
|
||||
if (states[j]) {
|
||||
LongField value = new LongField("value", keys[j], Field.Store.NO);
|
||||
final LongHashSet v = values.get(i);
|
||||
for (LongCursor c : v) {
|
||||
LongField value = new LongField("value", c.value, Field.Store.NO);
|
||||
doc.add(value);
|
||||
}
|
||||
}
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
writer.forceMerge(1, true);
|
||||
|
@ -349,10 +347,10 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
|
|||
final AtomicNumericFieldData atomicFieldData = indexFieldData.load(refreshReader());
|
||||
final SortedNumericDocValues data = atomicFieldData.getLongValues();
|
||||
final SortedNumericDoubleValues doubleData = atomicFieldData.getDoubleValues();
|
||||
final LongOpenHashSet set = new LongOpenHashSet();
|
||||
final DoubleOpenHashSet doubleSet = new DoubleOpenHashSet();
|
||||
final LongHashSet set = new LongHashSet();
|
||||
final DoubleHashSet doubleSet = new DoubleHashSet();
|
||||
for (int i = 0; i < values.size(); ++i) {
|
||||
final LongOpenHashSet v = values.get(i);
|
||||
final LongHashSet v = values.get(i);
|
||||
|
||||
data.setDocument(i);
|
||||
assertThat(data.count() > 0, equalTo(!v.isEmpty()));
|
||||
|
@ -367,13 +365,9 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
}
assertThat(set, equalTo(v));

final DoubleOpenHashSet doubleV = new DoubleOpenHashSet();
final boolean[] states = v.allocated;
final long[] keys = v.keys;
for (int j = 0; j < states.length; j++) {
if (states[j]) {
doubleV.add((double) keys[j]);
}
final DoubleHashSet doubleV = new DoubleHashSet();
for (LongCursor c : v) {
doubleV.add(c.value);
}
doubleSet.clear();
doubleData.setDocument(i);
@ -394,10 +388,10 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
private void test(Data data) throws Exception {
Random r = getRandom();
final int numDocs = 1000 + r.nextInt(19000);
final List<LongOpenHashSet> values = new ArrayList<>(numDocs);
final List<LongHashSet> values = new ArrayList<>(numDocs);
for (int i = 0; i < numDocs; ++i) {
final int numValues = data.numValues(r);
final LongOpenHashSet vals = new LongOpenHashSet(numValues);
final LongHashSet vals = new LongHashSet(numValues);
for (int j = 0; j < numValues; ++j) {
vals.add(data.nextValue(r));
}
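In the field-data test above, the 0.6 code walked the set's internal allocated/keys arrays; those are no longer exposed in 0.7, so the loop becomes cursor iteration. A small standalone sketch of the same pattern, with hypothetical values that are not from this commit:

import com.carrotsearch.hppc.DoubleHashSet;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.cursors.LongCursor;

public class CursorIterationSketch {
    public static void main(String[] args) {
        LongHashSet values = LongHashSet.from(1L, 2L, 3L);

        // 0.6.x: for (int j = 0; j < v.allocated.length; j++) if (v.allocated[j]) use(v.keys[j]);
        // 0.7.x: iterate cursors instead of touching the backing arrays.
        DoubleHashSet asDoubles = new DoubleHashSet(values.size());
        for (LongCursor c : values) {
            asDoubles.add((double) c.value);
        }
        System.out.println(asDoubles.size() + " values copied");
    }
}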
@ -18,8 +18,8 @@
|
|||
*/
|
||||
package org.elasticsearch.index.search.child;
|
||||
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.document.Document;
|
||||
|
@ -155,10 +155,10 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
|
|||
childValues[i] = Integer.toString(i);
|
||||
}
|
||||
|
||||
IntOpenHashSet filteredOrDeletedDocs = new IntOpenHashSet();
|
||||
IntHashSet filteredOrDeletedDocs = new IntHashSet();
|
||||
int childDocId = 0;
|
||||
int numParentDocs = scaledRandomIntBetween(1, numUniqueChildValues);
|
||||
ObjectObjectOpenHashMap<String, NavigableSet<String>> childValueToParentIds = new ObjectObjectOpenHashMap<>();
|
||||
ObjectObjectHashMap<String, NavigableSet<String>> childValueToParentIds = new ObjectObjectHashMap<>();
|
||||
for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
|
||||
boolean markParentAsDeleted = rarely();
|
||||
boolean filterMe = rarely();
|
||||
|
@ -194,7 +194,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
if (!markChildAsDeleted) {
NavigableSet<String> parentIds;
if (childValueToParentIds.containsKey(childValue)) {
parentIds = childValueToParentIds.lget();
parentIds = childValueToParentIds.get(childValue);
} else {
childValueToParentIds.put(childValue, parentIds = new TreeSet<>());
}
@ -271,7 +271,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
LeafReader slowLeafReader = SlowCompositeReaderWrapper.wrap(indexReader);
Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
if (terms != null) {
NavigableSet<String> parentIds = childValueToParentIds.lget();
NavigableSet<String> parentIds = childValueToParentIds.get(childValue);
TermsEnum termsEnum = terms.iterator();
PostingsEnum docsEnum = null;
for (String id : parentIds) {
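HPPC 0.7 drops the lget()/lset() shortcuts that reused the slot found by the previous containsKey call, so the tests now look the key up again with get(). A sketch of the two idioms that replace the old pair, assuming 0.7.x's get/indexOf/indexGet; the map contents are made up:

import com.carrotsearch.hppc.ObjectObjectHashMap;

import java.util.NavigableSet;
import java.util.TreeSet;

public class LgetMigrationSketch {
    public static void main(String[] args) {
        ObjectObjectHashMap<String, NavigableSet<String>> childValueToParentIds = new ObjectObjectHashMap<>();

        // 0.6.x: if (map.containsKey(k)) { v = map.lget(); } else { map.put(k, v = new TreeSet<>()); }
        // 0.7.x, simplest form (get returns null for a missing key):
        NavigableSet<String> parentIds = childValueToParentIds.get("child-1");
        if (parentIds == null) {
            childValueToParentIds.put("child-1", parentIds = new TreeSet<>());
        }
        parentIds.add("parent-1");

        // 0.7.x, single hash probe when that matters:
        int slot = childValueToParentIds.indexOf("child-1");
        if (slot >= 0) {
            System.out.println(childValueToParentIds.indexGet(slot));
        }
    }
}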
@ -19,8 +19,8 @@
|
|||
package org.elasticsearch.index.search.child;
|
||||
|
||||
import com.carrotsearch.hppc.FloatArrayList;
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomInts;
|
||||
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
|
@ -131,11 +131,11 @@ public class ChildrenQueryTests extends AbstractChildTests {
|
|||
childValues[i] = Integer.toString(i);
|
||||
}
|
||||
|
||||
IntOpenHashSet filteredOrDeletedDocs = new IntOpenHashSet();
|
||||
IntHashSet filteredOrDeletedDocs = new IntHashSet();
|
||||
|
||||
int childDocId = 0;
|
||||
int numParentDocs = scaledRandomIntBetween(1, numUniqueChildValues);
|
||||
ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds = new ObjectObjectOpenHashMap<>();
|
||||
ObjectObjectHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds = new ObjectObjectHashMap<>();
|
||||
for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
|
||||
boolean markParentAsDeleted = rarely();
|
||||
boolean filterMe = rarely();
|
||||
|
@ -171,7 +171,7 @@ public class ChildrenQueryTests extends AbstractChildTests {
|
|||
if (!markChildAsDeleted) {
|
||||
NavigableMap<String, FloatArrayList> parentIdToChildScores;
|
||||
if (childValueToParentIds.containsKey(childValue)) {
|
||||
parentIdToChildScores = childValueToParentIds.lget();
|
||||
parentIdToChildScores = childValueToParentIds.get(childValue);
|
||||
} else {
|
||||
childValueToParentIds.put(childValue, parentIdToChildScores = new TreeMap<>());
|
||||
}
|
||||
|
@ -255,7 +255,7 @@ public class ChildrenQueryTests extends AbstractChildTests {
|
|||
final FloatArrayList[] scores = new FloatArrayList[slowLeafReader.maxDoc()];
|
||||
Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
|
||||
if (terms != null) {
|
||||
NavigableMap<String, FloatArrayList> parentIdToChildScores = childValueToParentIds.lget();
|
||||
NavigableMap<String, FloatArrayList> parentIdToChildScores = childValueToParentIds.get(childValue);
|
||||
TermsEnum termsEnum = terms.iterator();
|
||||
PostingsEnum docsEnum = null;
|
||||
for (Map.Entry<String, FloatArrayList> entry : parentIdToChildScores.entrySet()) {
|
||||
|
|
|
@ -18,8 +18,8 @@
|
|||
*/
|
||||
package org.elasticsearch.index.search.child;
|
||||
|
||||
import com.carrotsearch.hppc.IntIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.IntIntHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.document.Document;
|
||||
|
@ -112,8 +112,8 @@ public class ParentConstantScoreQueryTests extends AbstractChildTests {
|
|||
|
||||
int childDocId = 0;
|
||||
int numParentDocs = scaledRandomIntBetween(1, numUniqueParentValues);
|
||||
ObjectObjectOpenHashMap<String, NavigableSet<String>> parentValueToChildDocIds = new ObjectObjectOpenHashMap<>();
|
||||
IntIntOpenHashMap childIdToParentId = new IntIntOpenHashMap();
|
||||
ObjectObjectHashMap<String, NavigableSet<String>> parentValueToChildDocIds = new ObjectObjectHashMap<>();
|
||||
IntIntHashMap childIdToParentId = new IntIntHashMap();
|
||||
for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
|
||||
boolean markParentAsDeleted = rarely();
|
||||
String parentValue = parentValues[random().nextInt(parentValues.length)];
|
||||
|
@ -152,7 +152,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTests {
|
|||
if (!markParentAsDeleted) {
|
||||
NavigableSet<String> childIds;
|
||||
if (parentValueToChildDocIds.containsKey(parentValue)) {
|
||||
childIds = parentValueToChildDocIds.lget();
|
||||
childIds = parentValueToChildDocIds.get(parentValue);
|
||||
} else {
|
||||
parentValueToChildDocIds.put(parentValue, childIds = new TreeSet<>());
|
||||
}
|
||||
|
@ -222,7 +222,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTests {
|
|||
LeafReader slowLeafReader = SlowCompositeReaderWrapper.wrap(indexReader);
|
||||
Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
|
||||
if (terms != null) {
|
||||
NavigableSet<String> childIds = parentValueToChildDocIds.lget();
|
||||
NavigableSet<String> childIds = parentValueToChildDocIds.get(parentValue);
|
||||
TermsEnum termsEnum = terms.iterator();
|
||||
PostingsEnum docsEnum = null;
|
||||
for (String id : childIds) {
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
package org.elasticsearch.index.search.child;
|
||||
|
||||
import com.carrotsearch.hppc.FloatArrayList;
|
||||
import com.carrotsearch.hppc.IntIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
|
||||
import com.carrotsearch.hppc.IntIntHashMap;
|
||||
import com.carrotsearch.hppc.ObjectObjectHashMap;
|
||||
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.document.Document;
|
||||
|
@ -116,8 +116,8 @@ public class ParentQueryTests extends AbstractChildTests {
|
|||
|
||||
int childDocId = 0;
|
||||
int numParentDocs = scaledRandomIntBetween(1, numUniqueParentValues);
|
||||
ObjectObjectOpenHashMap<String, NavigableMap<String, Float>> parentValueToChildIds = new ObjectObjectOpenHashMap<>();
|
||||
IntIntOpenHashMap childIdToParentId = new IntIntOpenHashMap();
|
||||
ObjectObjectHashMap<String, NavigableMap<String, Float>> parentValueToChildIds = new ObjectObjectHashMap<>();
|
||||
IntIntHashMap childIdToParentId = new IntIntHashMap();
|
||||
for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
|
||||
boolean markParentAsDeleted = rarely();
|
||||
String parentValue = parentValues[random().nextInt(parentValues.length)];
|
||||
|
@ -153,10 +153,8 @@ public class ParentQueryTests extends AbstractChildTests {
|
|||
indexWriter.addDocument(document);
|
||||
|
||||
if (!markParentAsDeleted) {
|
||||
NavigableMap<String, Float> childIdToScore;
|
||||
if (parentValueToChildIds.containsKey(parentValue)) {
|
||||
childIdToScore = parentValueToChildIds.lget();
|
||||
} else {
|
||||
NavigableMap<String, Float> childIdToScore = parentValueToChildIds.getOrDefault(parentValue, null);
|
||||
if (childIdToScore == null) {
|
||||
parentValueToChildIds.put(parentValue, childIdToScore = new TreeMap<>());
|
||||
}
|
||||
if (!markChildAsDeleted && !filterMe) {
|
||||
|
@ -226,7 +224,7 @@ public class ParentQueryTests extends AbstractChildTests {
|
|||
final FloatArrayList[] scores = new FloatArrayList[slowLeafReader.maxDoc()];
|
||||
Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
|
||||
if (terms != null) {
|
||||
NavigableMap<String, Float> childIdsAndScore = parentValueToChildIds.lget();
|
||||
NavigableMap<String, Float> childIdsAndScore = parentValueToChildIds.get(parentValue);
|
||||
TermsEnum termsEnum = terms.iterator();
|
||||
PostingsEnum docsEnum = null;
|
||||
for (Map.Entry<String, Float> entry : childIdsAndScore.entrySet()) {
|
||||
|
|
|
@ -711,27 +711,27 @@ public class IndexStatsTests extends ElasticsearchIntegrationTest {
|
|||
stats = builder.setFieldDataFields("bar").execute().actionGet();
|
||||
assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
|
||||
assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(false));
|
||||
|
||||
stats = builder.setFieldDataFields("bar", "baz").execute().actionGet();
|
||||
assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
|
||||
assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(true));
|
||||
assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0l));
|
||||
|
||||
stats = builder.setFieldDataFields("*").execute().actionGet();
|
||||
assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
|
||||
assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(true));
|
||||
assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0l));
|
||||
|
||||
stats = builder.setFieldDataFields("*r").execute().actionGet();
|
||||
assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
|
||||
assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
|
||||
assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(false));
|
||||
|
||||
}
|
||||
|
@ -758,27 +758,27 @@ public class IndexStatsTests extends ElasticsearchIntegrationTest {
|
|||
stats = builder.setCompletionFields("bar.completion").execute().actionGet();
|
||||
assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
|
||||
assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(false));
|
||||
|
||||
stats = builder.setCompletionFields("bar.completion", "baz.completion").execute().actionGet();
|
||||
assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
|
||||
assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(true));
|
||||
assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().get("baz.completion"), greaterThan(0l));
|
||||
|
||||
stats = builder.setCompletionFields("*").execute().actionGet();
|
||||
assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
|
||||
assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(true));
|
||||
assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().get("baz.completion"), greaterThan(0l));
|
||||
|
||||
stats = builder.setCompletionFields("*r*").execute().actionGet();
|
||||
assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
|
||||
assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
|
||||
assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(false));
|
||||
|
||||
}
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.recovery;
|
||||
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import com.carrotsearch.hppc.procedures.IntProcedure;
|
||||
import com.google.common.base.Predicate;
|
||||
import com.google.common.util.concurrent.ListenableFuture;
|
||||
|
@ -229,7 +229,7 @@ public class RelocationTests extends ElasticsearchIntegrationTest {
|
|||
for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) {
|
||||
hitIds[hit] = hit + 1;
|
||||
}
|
||||
IntOpenHashSet set = IntOpenHashSet.from(hitIds);
|
||||
IntHashSet set = IntHashSet.from(hitIds);
|
||||
for (SearchHit hit : hits.hits()) {
|
||||
int id = Integer.parseInt(hit.id());
|
||||
if (!set.remove(id)) {
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.search.aggregations;
|
||||
|
||||
import com.carrotsearch.hppc.IntIntMap;
|
||||
import com.carrotsearch.hppc.IntIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.IntIntHashMap;
|
||||
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
|
@ -60,7 +60,7 @@ public class CombiTests extends ElasticsearchIntegrationTest {
|
|||
|
||||
createIndex("idx");
|
||||
IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)];
|
||||
IntIntMap values = new IntIntOpenHashMap();
|
||||
IntIntMap values = new IntIntHashMap();
|
||||
long missingValues = 0;
|
||||
for (int i = 0; i < builders.length; i++) {
|
||||
String name = "name_" + randomIntBetween(1, 10);
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.aggregations;
|
||||
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.collect.Maps;
|
||||
|
||||
|
@ -174,7 +174,7 @@ public class EquivalenceTests extends ElasticsearchIntegrationTest {
|
|||
final int numDocs = scaledRandomIntBetween(1000, 2000);
|
||||
final int maxNumTerms = randomIntBetween(10, 5000);
|
||||
|
||||
final IntOpenHashSet valuesSet = new IntOpenHashSet();
|
||||
final IntHashSet valuesSet = new IntHashSet();
|
||||
cluster().wipeIndices("idx");
|
||||
prepareCreate("idx")
|
||||
.addMapping("type", jsonBuilder().startObject()
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
package org.elasticsearch.search.aggregations;
|
||||
|
||||
import com.carrotsearch.hppc.IntIntMap;
|
||||
import com.carrotsearch.hppc.IntIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.IntIntHashMap;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.search.aggregations.bucket.missing.Missing;
|
||||
|
@ -52,7 +52,7 @@ public class MetaDataTests extends ElasticsearchIntegrationTest {
|
|||
|
||||
createIndex("idx");
|
||||
IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)];
|
||||
IntIntMap values = new IntIntOpenHashMap();
|
||||
IntIntMap values = new IntIntHashMap();
|
||||
long missingValues = 0;
|
||||
for (int i = 0; i < builders.length; i++) {
|
||||
String name = "name_" + randomIntBetween(1, 10);
|
||||
|
|
|
@ -18,8 +18,8 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectIntHashMap;
|
||||
import com.carrotsearch.hppc.ObjectIntMap;
|
||||
import com.carrotsearch.hppc.ObjectIntOpenHashMap;
|
||||
import com.carrotsearch.hppc.cursors.ObjectIntCursor;
|
||||
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
|
@ -81,7 +81,7 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
|
|||
|
||||
List<IndexRequestBuilder> cities = new ArrayList<>();
|
||||
Random random = getRandom();
|
||||
expectedDocCountsForGeoHash = new ObjectIntOpenHashMap<>(numDocs * 2);
|
||||
expectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
//generate random point
|
||||
double lat = (180d * random.nextDouble()) - 90d;
|
||||
|
@ -105,7 +105,7 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
|
|||
.addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed"));
|
||||
|
||||
cities = new ArrayList<>();
|
||||
multiValuedExpectedDocCountsForGeoHash = new ObjectIntOpenHashMap<>(numDocs * 2);
|
||||
multiValuedExpectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
final int numPoints = random.nextInt(4);
|
||||
List<String> points = new ArrayList<>();
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket;
|
||||
|
||||
import com.carrotsearch.hppc.LongOpenHashSet;
|
||||
import com.carrotsearch.hppc.LongHashSet;
|
||||
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchPhaseExecutionException;
|
||||
|
@ -270,7 +270,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(histo.getName(), equalTo("histo"));
|
||||
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
|
||||
|
||||
LongOpenHashSet buckets = new LongOpenHashSet();
|
||||
LongHashSet buckets = new LongHashSet();
|
||||
// TODO: use diamond once JI-9019884 is fixed
|
||||
List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
|
||||
long previousCount = Long.MIN_VALUE;
|
||||
|
@ -300,7 +300,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(histo.getName(), equalTo("histo"));
|
||||
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
|
||||
|
||||
LongOpenHashSet buckets = new LongOpenHashSet();
|
||||
LongHashSet buckets = new LongHashSet();
|
||||
// TODO: use diamond once JI-9019884 is fixed
|
||||
List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
|
||||
long previousCount = Long.MAX_VALUE;
|
||||
|
@ -407,7 +407,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(histo.getName(), equalTo("histo"));
|
||||
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
|
||||
|
||||
LongOpenHashSet visited = new LongOpenHashSet();
|
||||
LongHashSet visited = new LongHashSet();
|
||||
double previousSum = Double.NEGATIVE_INFINITY;
|
||||
// TODO: use diamond once JI-9019884 is fixed
|
||||
List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
|
||||
|
@ -448,7 +448,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(histo.getName(), equalTo("histo"));
|
||||
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
|
||||
|
||||
LongOpenHashSet visited = new LongOpenHashSet();
|
||||
LongHashSet visited = new LongHashSet();
|
||||
double previousSum = Double.POSITIVE_INFINITY;
|
||||
// TODO: use diamond once JI-9019884 is fixed
|
||||
List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
|
||||
|
@ -489,7 +489,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(histo.getName(), equalTo("histo"));
|
||||
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
|
||||
|
||||
LongOpenHashSet visited = new LongOpenHashSet();
|
||||
LongHashSet visited = new LongHashSet();
|
||||
double previousSum = Double.NEGATIVE_INFINITY;
|
||||
// TODO: use diamond once JI-9019884 is fixed
|
||||
List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
|
||||
|
@ -530,7 +530,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(histo.getName(), equalTo("histo"));
|
||||
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
|
||||
|
||||
LongOpenHashSet visited = new LongOpenHashSet();
|
||||
LongHashSet visited = new LongHashSet();
|
||||
double previousSum = Double.POSITIVE_INFINITY;
|
||||
// TODO: use diamond once JI-9019884 is fixed
|
||||
List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
|
||||
|
@ -573,7 +573,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(histo.getName(), equalTo("histo"));
|
||||
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
|
||||
|
||||
LongOpenHashSet visited = new LongOpenHashSet();
|
||||
LongHashSet visited = new LongHashSet();
|
||||
double prevMax = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
|
||||
// TODO: use diamond once JI-9019884 is fixed
|
||||
List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.aggregations.bucket;
|
||||
|
||||
import com.carrotsearch.hppc.LongOpenHashSet;
|
||||
import com.carrotsearch.hppc.LongHashSet;
|
||||
import com.carrotsearch.hppc.LongSet;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
|
||||
|
||||
|
@ -67,7 +67,7 @@ public class MinDocCountTests extends AbstractTermsTests {
|
|||
cardinality = randomIntBetween(8, 30);
|
||||
final List<IndexRequestBuilder> indexRequests = new ArrayList<>();
|
||||
final Set<String> stringTerms = new HashSet<>();
|
||||
final LongSet longTerms = new LongOpenHashSet();
|
||||
final LongSet longTerms = new LongHashSet();
|
||||
final Set<String> dateTerms = new HashSet<>();
|
||||
for (int i = 0; i < cardinality; ++i) {
|
||||
String stringTerm;
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
|
||||
package org.elasticsearch.search.aggregations.metrics.cardinality;
|
||||
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.hash.MurmurHash3;
|
||||
import com.carrotsearch.hppc.BitMixer;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.test.ElasticsearchTestCase;
|
||||
import org.junit.Test;
|
||||
|
@ -62,12 +62,12 @@ public class HyperLogLogPlusPlusTests extends ElasticsearchTestCase {
final int numValues = randomIntBetween(1, 100000);
final int maxValue = randomIntBetween(1, randomBoolean() ? 1000: 100000);
final int p = randomIntBetween(14, MAX_PRECISION);
IntOpenHashSet set = new IntOpenHashSet();
IntHashSet set = new IntHashSet();
HyperLogLogPlusPlus e = new HyperLogLogPlusPlus(p, BigArrays.NON_RECYCLING_INSTANCE, 1);
for (int i = 0; i < numValues; ++i) {
final int n = randomInt(maxValue);
set.add(n);
final long hash = MurmurHash3.hash((long) n);
final long hash = BitMixer.mix64(n);
e.collect(bucket, hash);
if (randomInt(100) == 0) {
//System.out.println(e.cardinality(bucket) + " <> " + set.size());
@ -91,7 +91,7 @@ public class HyperLogLogPlusPlusTests extends ElasticsearchTestCase {
final int maxValue = randomIntBetween(1, randomBoolean() ? 1000: 1000000);
for (int i = 0; i < numValues; ++i) {
final int n = randomInt(maxValue);
final long hash = MurmurHash3.hash((long) n);
final long hash = BitMixer.mix64(n);
single.collect(0, hash);
// use a gaussian so that all instances don't collect as many hashes
final int index = (int) (Math.pow(randomDouble(), 2));
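This test stops using com.carrotsearch.hppc.hash.MurmurHash3 and feeds BitMixer.mix64 output to HyperLogLogPlusPlus instead. A standalone sketch of the hashing side only, with arbitrary loop values, assuming the 0.7.x BitMixer class shown in the imports above:

import com.carrotsearch.hppc.BitMixer;

public class HashMixSketch {
    public static void main(String[] args) {
        // 0.6.x: final long hash = MurmurHash3.hash((long) n);
        // 0.7.x: BitMixer supplies a comparable 64-bit finalizing mix.
        for (int n : new int[]{1, 2, 3}) {
            long hash = BitMixer.mix64(n);
            System.out.println(n + " -> " + Long.toHexString(hash));
        }
    }
}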
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.scroll;
|
||||
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
|
@ -159,7 +159,7 @@ public class DuelScrollTests extends ElasticsearchIntegrationTest {
|
|||
boolean unevenRouting = randomBoolean();
|
||||
|
||||
int numMissingDocs = scaledRandomIntBetween(0, numDocs / 100);
|
||||
IntOpenHashSet missingDocs = new IntOpenHashSet(numMissingDocs);
|
||||
IntHashSet missingDocs = new IntHashSet(numMissingDocs);
|
||||
for (int i = 0; i < numMissingDocs; i++) {
|
||||
while (!missingDocs.add(randomInt(numDocs))) {}
|
||||
}
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.search.suggest;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectLongHashMap;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
|
@ -751,7 +751,7 @@ public class CompletionSuggestSearchTests extends ElasticsearchIntegrationTest {
|
|||
|
||||
// regexes
|
||||
IndicesStatsResponse regexFieldStats = client().admin().indices().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).setCompletionFields("*").get();
|
||||
ObjectLongOpenHashMap<String> fields = regexFieldStats.getIndex(INDEX).getPrimaries().completion.getFields();
|
||||
ObjectLongHashMap<String> fields = regexFieldStats.getIndex(INDEX).getPrimaries().completion.getFields();
|
||||
long regexSizeInBytes = fields.get(FIELD) + fields.get(otherField);
|
||||
assertThat(regexSizeInBytes, is(totalSizeInBytes));
|
||||
}
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.suggest.completion;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
|
||||
import com.carrotsearch.hppc.ObjectLongHashMap;
|
||||
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.codecs.CodecUtil;
|
||||
|
@ -261,9 +261,9 @@ public class AnalyzingCompletionLookupProviderV1 extends CompletionLookupProvide
|
|||
@Override
|
||||
public CompletionStats stats(String... fields) {
|
||||
long sizeInBytes = 0;
|
||||
ObjectLongOpenHashMap<String> completionFields = null;
|
||||
ObjectLongHashMap<String> completionFields = null;
|
||||
if (fields != null && fields.length > 0) {
|
||||
completionFields = new ObjectLongOpenHashMap<>(fields.length);
|
||||
completionFields = new ObjectLongHashMap<>(fields.length);
|
||||
}
|
||||
|
||||
for (Map.Entry<String, AnalyzingSuggestHolder> entry : lookupMap.entrySet()) {
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.snapshots;
|
||||
|
||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
||||
import com.carrotsearch.hppc.IntHashSet;
|
||||
import com.carrotsearch.hppc.IntSet;
|
||||
import com.google.common.base.Predicate;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
|
@ -581,7 +581,7 @@ public class DedicatedClusterSnapshotRestoreTests extends AbstractSnapshotTests
|
|||
ensureGreen("test-idx");
|
||||
assertThat(client().prepareCount("test-idx").get().getCount(), equalTo(100L));
|
||||
|
||||
IntSet reusedShards = IntOpenHashSet.newInstance();
|
||||
IntSet reusedShards = new IntHashSet();
|
||||
for (ShardRecoveryResponse response : client().admin().indices().prepareRecoveries("test-idx").get().shardResponses().get("test-idx")) {
|
||||
if (response.recoveryState().getIndex().reusedBytes() > 0) {
|
||||
reusedShards.add(response.getShardId());
|
||||
|
|
|
@ -18,7 +18,7 @@
*/
package org.elasticsearch.transport.netty;

import com.carrotsearch.hppc.IntOpenHashSet;
import com.carrotsearch.hppc.IntHashSet;
import com.google.common.base.Charsets;
import org.elasticsearch.Version;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
@ -156,7 +156,7 @@ public class NettyTransportMultiPortTests extends ElasticsearchTestCase {
}

private int[] getRandomPorts(int numberOfPorts) {
IntOpenHashSet ports = new IntOpenHashSet();
IntHashSet ports = new IntHashSet();

for (int i = 0; i < numberOfPorts; i++) {
int port = randomIntBetween(49152, 65535);