parent 472cc0af08
commit 491b00c4ec

pom.xml
@@ -226,7 +226,14 @@
 <dependency>
 <groupId>com.carrotsearch</groupId>
 <artifactId>hppc</artifactId>
-<version>0.6.0</version>
+<version>0.7.1</version>
+</dependency>
+
+<dependency> <!-- ES uses byte* hashes -->
+<groupId>com.carrotsearch</groupId>
+<artifactId>hppc</artifactId>
+<version>0.7.1</version>
+<classifier>esoteric</classifier>
 </dependency>
 
 <dependency>
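The remaining hunks are the mechanical fallout of this version bump: HPPC 0.7 dropped the Open infix from its container class names (ObjectObjectOpenHashMap becomes ObjectObjectHashMap, ObjectIntOpenHashMap becomes ObjectIntHashMap, ObjectOpenHashSet becomes ObjectHashSet, and so on). A minimal sketch of the rename, with a hypothetical wrapper class that is not part of this commit:

    // HPPC 0.6:  import com.carrotsearch.hppc.ObjectIntOpenHashMap;
    // HPPC 0.7:
    import com.carrotsearch.hppc.ObjectIntHashMap;

    class RenameSketch {                       // hypothetical class, for illustration only
        private final ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();

        void record(String key) {
            counts.addTo(key, 1);              // addTo is unchanged between 0.6 and 0.7
        }
    }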
@@ -827,10 +827,7 @@ public class MapperQueryParser extends QueryParser {
 
 private void applyBoost(String field, Query q) {
 if (settings.boosts() != null) {
-float boost = 1f;
-if (settings.boosts().containsKey(field)) {
-boost = settings.boosts().lget();
-}
+float boost = settings.boosts().getOrDefault(field, 1f);
 q.setBoost(boost);
 }
 }
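The containsKey()/lget() idiom used above no longer exists in HPPC 0.7; the straightforward replacement, as the new code shows, is getOrDefault with an explicit fallback. A minimal sketch of the two idioms (hypothetical names, not code from this commit):

    import com.carrotsearch.hppc.ObjectFloatHashMap;

    class BoostLookupSketch {                  // hypothetical class, for illustration only
        private final ObjectFloatHashMap<String> boosts = new ObjectFloatHashMap<>();

        float boostFor(String field) {
            // HPPC 0.6:
            //   float boost = 1f;
            //   if (boosts.containsKey(field)) {
            //       boost = boosts.lget();    // read the value at the slot found by containsKey
            //   }
            // HPPC 0.7: one call with an explicit default value.
            return boosts.getOrDefault(field, 1f);
        }
    }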
@@ -19,7 +19,7 @@
 
 package org.apache.lucene.queryparser.classic;
 
-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.search.FuzzyQuery;
@@ -69,7 +69,7 @@ public class QueryParserSettings {
 
 List<String> fields = null;
 Collection<String> queryTypes = null;
-ObjectFloatOpenHashMap<String> boosts = null;
+ObjectFloatHashMap<String> boosts = null;
 float tieBreaker = 0.0f;
 boolean useDisMax = true;
 
@@ -286,11 +286,11 @@ public class QueryParserSettings {
 this.queryTypes = queryTypes;
 }
 
-public ObjectFloatOpenHashMap<String> boosts() {
+public ObjectFloatHashMap<String> boosts() {
 return boosts;
 }
 
-public void boosts(ObjectFloatOpenHashMap<String> boosts) {
+public void boosts(ObjectFloatHashMap<String> boosts) {
 this.boosts = boosts;
 }
 
@@ -18,7 +18,7 @@
 */
 package org.apache.lucene.search.suggest.analyzing;
 
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -1008,7 +1008,7 @@ public long ramBytesUsed() {
 private BytesRefBuilder analyzed = new BytesRefBuilder();
 private final SurfaceFormAndPayload[] surfaceFormsAndPayload;
 private int count;
-private ObjectIntOpenHashMap<BytesRef> seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f);
+private ObjectIntHashMap<BytesRef> seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f);
 private int payloadSep;
 
 public XBuilder(int maxSurfaceFormsPerAnalyzedForm, boolean hasPayloads, int payloadSep) {
@@ -1061,9 +1061,11 @@ public long ramBytesUsed() {
 // dups: skip the rest:
 return;
 }
+
 BytesRef surfaceCopy;
-if (count > 0 && seenSurfaceForms.containsKey(surface)) {
-surfaceIndex = seenSurfaceForms.lget();
+final int keySlot;
+if (count > 0 && (keySlot = seenSurfaceForms.indexOf(surface)) >= 0) {
+surfaceIndex = seenSurfaceForms.indexGet(keySlot);
 SurfaceFormAndPayload surfaceFormAndPayload = surfaceFormsAndPayload[surfaceIndex];
 if (encodedWeight >= surfaceFormAndPayload.weight) {
 return;
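HPPC 0.7 also drops the implicit last-slot cursor behind lget(); the slot is now explicit, as above: indexOf() returns a non-negative slot when the key exists and indexGet() reads the value stored there. A minimal sketch of the lookup-or-insert pattern (hypothetical names, assuming HPPC 0.7 and Lucene's BytesRef):

    import com.carrotsearch.hppc.ObjectIntHashMap;
    import org.apache.lucene.util.BytesRef;

    class SeenFormsSketch {                    // hypothetical class, for illustration only
        private final ObjectIntHashMap<BytesRef> seen = new ObjectIntHashMap<>();

        int lookupOrInsert(BytesRef surface, int nextIndex) {
            final int keySlot = seen.indexOf(surface);  // >= 0 only when the key is present
            if (keySlot >= 0) {
                return seen.indexGet(keySlot);          // value stored at that slot
            }
            seen.put(BytesRef.deepCopyOf(surface), nextIndex); // copy the key before keeping it
            return nextIndex;
        }
    }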
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.admin.cluster.stats;
 
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 
 import org.elasticsearch.action.admin.indices.stats.CommonStats;
@@ -57,7 +57,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
 }
 
 public ClusterStatsIndices(ClusterStatsNodeResponse[] nodeResponses) {
-ObjectObjectOpenHashMap<String, ShardStats> countsPerIndex = new ObjectObjectOpenHashMap<>();
+ObjectObjectHashMap<String, ShardStats> countsPerIndex = new ObjectObjectHashMap<>();
 
 this.docs = new DocsStats();
 this.store = new StoreStats();
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.admin.cluster.stats;
 
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectIntCursor;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
@@ -303,10 +303,10 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
 
 int availableProcessors;
 long availableMemory;
-ObjectIntOpenHashMap<OsInfo.Cpu> cpus;
+ObjectIntHashMap<OsInfo.Cpu> cpus;
 
 public OsStats() {
-cpus = new ObjectIntOpenHashMap<>();
+cpus = new ObjectIntHashMap<>();
 }
 
 public void addNodeInfo(NodeInfo nodeInfo) {
@@ -330,7 +330,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
 return new ByteSizeValue(availableMemory);
 }
 
-public ObjectIntOpenHashMap<OsInfo.Cpu> getCpus() {
+public ObjectIntHashMap<OsInfo.Cpu> getCpus() {
 return cpus;
 }
 
@@ -339,7 +339,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
 availableProcessors = in.readVInt();
 availableMemory = in.readLong();
 int size = in.readVInt();
-cpus = new ObjectIntOpenHashMap<>(size);
+cpus = new ObjectIntHashMap<>(size);
 for (; size > 0; size--) {
 cpus.addTo(OsInfo.Cpu.readCpu(in), in.readVInt());
 }
@@ -496,21 +496,21 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
 
 public static class JvmStats implements Streamable, ToXContent {
 
-ObjectIntOpenHashMap<JvmVersion> versions;
+ObjectIntHashMap<JvmVersion> versions;
 long threads;
 long maxUptime;
 long heapUsed;
 long heapMax;
 
 JvmStats() {
-versions = new ObjectIntOpenHashMap<>();
+versions = new ObjectIntHashMap<>();
 threads = 0;
 maxUptime = 0;
 heapMax = 0;
 heapUsed = 0;
 }
 
-public ObjectIntOpenHashMap<JvmVersion> getVersions() {
+public ObjectIntHashMap<JvmVersion> getVersions() {
 return versions;
 }
 
@@ -561,7 +561,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
 @Override
 public void readFrom(StreamInput in) throws IOException {
 int size = in.readVInt();
-versions = new ObjectIntOpenHashMap<>(size);
+versions = new ObjectIntHashMap<>(size);
 for (; size > 0; size--) {
 versions.addTo(JvmVersion.readJvmVersion(in), in.readVInt());
 }
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.admin.indices.mapping.put;
 
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.IndicesRequest;
@@ -51,7 +51,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
 */
 public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> implements IndicesRequest.Replaceable {
 
-private static ObjectOpenHashSet<String> RESERVED_FIELDS = ObjectOpenHashSet.from(
+private static ObjectHashSet<String> RESERVED_FIELDS = ObjectHashSet.from(
 "_uid", "_id", "_type", "_source", "_all", "_analyzer", "_parent", "_routing", "_index",
 "_size", "_timestamp", "_ttl"
 );
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.action.termvectors;
 
-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import com.carrotsearch.hppc.cursors.ObjectLongCursor;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.PostingsEnum;
@@ -113,7 +113,7 @@ import static org.apache.lucene.util.ArrayUtil.grow;
 
 public final class TermVectorsFields extends Fields {
 
-private final ObjectLongOpenHashMap<String> fieldMap;
+private final ObjectLongHashMap<String> fieldMap;
 private final BytesReference termVectors;
 final boolean hasTermStatistic;
 final boolean hasFieldStatistic;
@@ -126,7 +126,7 @@ public final class TermVectorsFields extends Fields {
 */
 public TermVectorsFields(BytesReference headerRef, BytesReference termVectors) throws IOException {
 BytesStreamInput header = new BytesStreamInput(headerRef);
-fieldMap = new ObjectLongOpenHashMap<>();
+fieldMap = new ObjectLongHashMap<>();
 
 // here we read the header to fill the field offset map
 String headerString = header.readString();
@@ -170,10 +170,11 @@ public final class TermVectorsFields extends Fields {
 public Terms terms(String field) throws IOException {
 // first, find where in the termVectors bytes the actual term vector for
 // this field is stored
-if (!fieldMap.containsKey(field)) {
+final int keySlot = fieldMap.indexOf(field);
+if (keySlot < 0) {
 return null; // we don't have it.
 }
-long readOffset = fieldMap.lget();
+long readOffset = fieldMap.indexGet(keySlot);
 return new TermVector(termVectors, readOffset);
 }
 
@@ -20,11 +20,13 @@
 package org.elasticsearch.cluster.metadata;
 
 import com.carrotsearch.hppc.ObjectArrayList;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.base.Predicate;
 import com.google.common.collect.*;
+
+import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.*;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.DiffableUtils.KeyedReader;
@@ -295,7 +297,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
 
 boolean matchAllAliases = matchAllAliases(aliases);
 ImmutableOpenMap.Builder<String, ImmutableList<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder();
-Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
 for (String index : intersection) {
 IndexMetaData indexMetaData = indices.get(index);
 List<AliasMetaData> filteredValues = Lists.newArrayList();
@@ -307,6 +309,13 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
 }
 
 if (!filteredValues.isEmpty()) {
+// Make the list order deterministic
+CollectionUtil.timSort(filteredValues, new Comparator<AliasMetaData>() {
+@Override
+public int compare(AliasMetaData o1, AliasMetaData o2) {
+return o1.alias().compareTo(o2.alias());
+}
+});
 mapBuilder.put(index, ImmutableList.copyOf(filteredValues));
 }
 }
@@ -337,7 +346,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
 return false;
 }
 
-Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
 for (String index : intersection) {
 IndexMetaData indexMetaData = indices.get(index);
 List<AliasMetaData> filteredValues = Lists.newArrayList();
@@ -368,7 +377,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
 }
 
 ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> indexMapBuilder = ImmutableOpenMap.builder();
-Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
 for (String index : intersection) {
 IndexMetaData indexMetaData = indices.get(index);
 ImmutableOpenMap.Builder<String, MappingMetaData> filteredMappings;
@@ -400,7 +409,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData> {
 final String[] warmers = Strings.isAllOrWildcard(uncheckedWarmers) ? Strings.EMPTY_ARRAY : uncheckedWarmers;
 
 ImmutableOpenMap.Builder<String, ImmutableList<IndexWarmersMetaData.Entry>> mapBuilder = ImmutableOpenMap.builder();
-Iterable<String> intersection = HppcMaps.intersection(ObjectOpenHashSet.from(concreteIndices), indices.keys());
+Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
 for (String index : intersection) {
 IndexMetaData indexMetaData = indices.get(index);
 IndexWarmersMetaData indexWarmersMetaData = indexMetaData.custom(IndexWarmersMetaData.TYPE);
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.cluster.node;
 
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.collect.ImmutableList;
@@ -334,7 +334,7 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
 }
 return nodesIds;
 } else {
-ObjectOpenHashSet<String> resolvedNodesIds = new ObjectOpenHashSet<>(nodesIds.length);
+ObjectHashSet<String> resolvedNodesIds = new ObjectHashSet<>(nodesIds.length);
 for (String nodeId : nodesIds) {
 if (nodeId.equals("_local")) {
 String localNodeId = localNodeId();
@@ -25,6 +25,8 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Sets;
 import com.google.common.collect.UnmodifiableIterator;
+
+import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -35,11 +37,13 @@ import org.elasticsearch.index.shard.ShardId;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
 
-import static com.google.common.collect.Lists.newArrayList;
+import static com.google.common.collect.Lists.*;
 
 /**
 * The {@link IndexRoutingTable} represents routing information for a single
@@ -540,7 +544,26 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
 
 public String prettyPrint() {
 StringBuilder sb = new StringBuilder("-- index [" + index + "]\n");
+
+List<IndexShardRoutingTable> ordered = new ArrayList<>();
 for (IndexShardRoutingTable indexShard : this) {
+ordered.add(indexShard);
+}
+
+CollectionUtil.timSort(ordered, new Comparator<IndexShardRoutingTable>() {
+@Override
+public int compare(IndexShardRoutingTable o1, IndexShardRoutingTable o2) {
+int v = o1.shardId().index().name().compareTo(
+o2.shardId().index().name());
+if (v == 0) {
+v = Integer.compare(o1.shardId().id(),
+o2.shardId().id());
+}
+return v;
+}
+});
+
+for (IndexShardRoutingTable indexShard : ordered) {
 sb.append("----shard_id [").append(indexShard.shardId().index().name()).append("][").append(indexShard.shardId().id()).append("]\n");
 for (ShardRouting shard : indexShard) {
 sb.append("--------").append(shard.shortSummary()).append("\n");
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.cluster.routing;
 
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.base.Predicate;
 import com.google.common.collect.*;
@@ -64,7 +64,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
 
 private Set<ShardId> clearPostAllocationFlag;
 
-private final Map<String, ObjectIntOpenHashMap<String>> nodesPerAttributeNames = new HashMap<>();
+private final Map<String, ObjectIntHashMap<String>> nodesPerAttributeNames = new HashMap<>();
 
 public RoutingNodes(ClusterState clusterState) {
 this.metaData = clusterState.metaData();
@@ -208,12 +208,12 @@ public class RoutingNodes implements Iterable<RoutingNode> {
 return nodesToShards.get(nodeId);
 }
 
-public ObjectIntOpenHashMap<String> nodesPerAttributesCounts(String attributeName) {
-ObjectIntOpenHashMap<String> nodesPerAttributesCounts = nodesPerAttributeNames.get(attributeName);
+public ObjectIntHashMap<String> nodesPerAttributesCounts(String attributeName) {
+ObjectIntHashMap<String> nodesPerAttributesCounts = nodesPerAttributeNames.get(attributeName);
 if (nodesPerAttributesCounts != null) {
 return nodesPerAttributesCounts;
 }
-nodesPerAttributesCounts = new ObjectIntOpenHashMap<>();
+nodesPerAttributesCounts = new ObjectIntHashMap<>();
 for (RoutingNode routingNode : this) {
 String attrValue = routingNode.node().attributes().get(attributeName);
 nodesPerAttributesCounts.addTo(attrValue, 1);
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.cluster.routing.allocation.decider;
 
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.google.common.collect.Maps;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.MutableShardRouting;
@@ -182,10 +182,10 @@ public class AwarenessAllocationDecider extends AllocationDecider {
 }
 
 // build attr_value -> nodes map
-ObjectIntOpenHashMap<String> nodesPerAttribute = allocation.routingNodes().nodesPerAttributesCounts(awarenessAttribute);
+ObjectIntHashMap<String> nodesPerAttribute = allocation.routingNodes().nodesPerAttributesCounts(awarenessAttribute);
 
 // build the count of shards per attribute value
-ObjectIntOpenHashMap<String> shardPerAttribute = new ObjectIntOpenHashMap<>();
+ObjectIntHashMap<String> shardPerAttribute = new ObjectIntHashMap<>();
 for (MutableShardRouting assignedShard : allocation.routingNodes().assignedShards(shardRouting)) {
 // if the shard is relocating, then make sure we count it as part of the node it is relocating to
 if (assignedShard.relocating()) {
@@ -20,7 +20,7 @@
 package org.elasticsearch.common;
 
 import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 
 /**
@@ -28,7 +28,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap;
 */
 public class ContextHolder {
 
-private ObjectObjectOpenHashMap<Object, Object> context;
+private ObjectObjectHashMap<Object, Object> context;
 
 /**
 * Attaches the given value to the context.
@@ -39,7 +39,7 @@ public class ContextHolder {
 @SuppressWarnings("unchecked")
 public final synchronized <V> V putInContext(Object key, Object value) {
 if (context == null) {
-context = new ObjectObjectOpenHashMap<>(2);
+context = new ObjectObjectHashMap<>(2);
 }
 return (V) context.put(key, value);
 }
@@ -52,7 +52,7 @@ public class ContextHolder {
 return;
 }
 if (context == null) {
-context = new ObjectObjectOpenHashMap<>(map);
+context = new ObjectObjectHashMap<>(map);
 } else {
 context.putAll(map);
 }
@@ -120,7 +120,7 @@ public class ContextHolder {
 return;
 }
 if (context == null) {
-context = new ObjectObjectOpenHashMap<>(other.context);
+context = new ObjectObjectHashMap<>(other.context);
 } else {
 context.putAll(other.context);
 }
@@ -19,9 +19,9 @@
 
 package org.elasticsearch.common.collect;
 
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.ObjectLookupContainer;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 
 import java.util.Iterator;
@@ -34,40 +34,50 @@ public final class HppcMaps {
 }
 
 /**
-* Returns a new map with the given initial capacity
+* Returns a new map with the given number of expected elements.
+*
+* @param expectedElements
+* The expected number of elements guaranteed not to cause buffer
+* expansion (inclusive).
 */
-public static <K, V> ObjectObjectOpenHashMap<K, V> newMap(int capacity) {
-return new ObjectObjectOpenHashMap<>(capacity);
+public static <K, V> ObjectObjectHashMap<K, V> newMap(int expectedElements) {
+return new ObjectObjectHashMap<>(expectedElements);
 }
 
 /**
-* Returns a new map with a default initial capacity of
-* {@value com.carrotsearch.hppc.HashContainerUtils#DEFAULT_CAPACITY}
+* Returns a new map with a default initial capacity.
 */
-public static <K, V> ObjectObjectOpenHashMap<K, V> newMap() {
+public static <K, V> ObjectObjectHashMap<K, V> newMap() {
 return newMap(16);
 }
 
 /**
 * Returns a map like {@link #newMap()} that does not accept <code>null</code> keys
 */
-public static <K, V> ObjectObjectOpenHashMap<K, V> newNoNullKeysMap() {
+public static <K, V> ObjectObjectHashMap<K, V> newNoNullKeysMap() {
 return ensureNoNullKeys(16);
 }
 
 /**
 * Returns a map like {@link #newMap(int)} that does not accept <code>null</code> keys
+*
+* @param expectedElements
+* The expected number of elements guaranteed not to cause buffer
+* expansion (inclusive).
 */
-public static <K, V> ObjectObjectOpenHashMap<K, V> newNoNullKeysMap(int capacity) {
-return ensureNoNullKeys(capacity);
+public static <K, V> ObjectObjectHashMap<K, V> newNoNullKeysMap(int expectedElements) {
+return ensureNoNullKeys(expectedElements);
 }
 
 /**
 * Wraps the given map and prevent adding of <code>null</code> keys.
+*
+* @param expectedElements
+* The expected number of elements guaranteed not to cause buffer
+* expansion (inclusive).
 */
-public static <K, V> ObjectObjectOpenHashMap<K, V> ensureNoNullKeys(int capacity) {
-return new ObjectObjectOpenHashMap<K, V>(capacity) {
+public static <K, V> ObjectObjectHashMap<K, V> ensureNoNullKeys(int expectedElements) {
+return new ObjectObjectHashMap<K, V>(expectedElements) {
 
 @Override
 public V put(K key, V value) {
 if (key == null) {
@@ -75,12 +85,11 @@ public final class HppcMaps {
 }
 return super.put(key, value);
 }
-
 };
 }
 
 /**
-* @return an intersection view over the two specified containers (which can be KeyContainer or ObjectOpenHashSet).
+* @return an intersection view over the two specified containers (which can be KeyContainer or ObjectHashSet).
 */
 // Hppc has forEach, but this means we need to build an intermediate set, with this method we just iterate
 // over each unique value without creating a third set.
@@ -124,12 +133,9 @@ public final class HppcMaps {
 }
 
 public final static class Object {
 
 public final static class Integer {
-
-public static <V> ObjectIntOpenHashMap<V> ensureNoNullKeys(int capacity, float loadFactor) {
-return new ObjectIntOpenHashMap<V>(capacity, loadFactor) {
-
+public static <V> ObjectIntHashMap<V> ensureNoNullKeys(int capacity, float loadFactor) {
+return new ObjectIntHashMap<V>(capacity, loadFactor) {
 @Override
 public int put(V key, int value) {
 if (key == null) {
@@ -139,9 +145,6 @@ public final class HppcMaps {
 }
 };
 }
-
 }
-
 }
-
 }
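The ensureNoNullKeys wrappers above now subclass the renamed ObjectObjectHashMap/ObjectIntHashMap and only override put to reject null keys. A rough usage sketch (hypothetical values; the exact exception thrown on a null key is not shown in this hunk):

    import com.carrotsearch.hppc.ObjectObjectHashMap;
    import org.elasticsearch.common.collect.HppcMaps;

    class NoNullKeysSketch {                   // hypothetical class, for illustration only
        void demo() {
            ObjectObjectHashMap<String, String> map = HppcMaps.newNoNullKeysMap(16);
            map.put("index", "twitter");       // ordinary keys work as usual
            try {
                map.put(null, "boom");         // the overridden put rejects null keys
            } catch (RuntimeException e) {
                // expected: the wrapper refuses null keys instead of storing them
            }
        }
    }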
@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.*;
 import com.carrotsearch.hppc.cursors.IntCursor;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
+import com.carrotsearch.hppc.predicates.IntObjectPredicate;
 import com.carrotsearch.hppc.predicates.IntPredicate;
 import com.carrotsearch.hppc.procedures.IntObjectProcedure;
 import com.google.common.collect.UnmodifiableIterator;
@@ -38,9 +39,9 @@ import java.util.Map;
 */
 public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCursor<VType>> {
 
-private final IntObjectOpenHashMap<VType> map;
+private final IntObjectHashMap<VType> map;
 
-private ImmutableOpenIntMap(IntObjectOpenHashMap<VType> map) {
+private ImmutableOpenIntMap(IntObjectHashMap<VType> map) {
 this.map = map;
 }
 
@@ -175,7 +176,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
 }
 
 @SuppressWarnings("unchecked")
-private static final ImmutableOpenIntMap EMPTY = new ImmutableOpenIntMap(new IntObjectOpenHashMap());
+private static final ImmutableOpenIntMap EMPTY = new ImmutableOpenIntMap(new IntObjectHashMap());
 
 @SuppressWarnings("unchecked")
 public static <VType> ImmutableOpenIntMap<VType> of() {
@@ -196,7 +197,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
 
 public static class Builder<VType> implements IntObjectMap<VType> {
 
-private IntObjectOpenHashMap<VType> map;
+private IntObjectHashMap<VType> map;
 
 public Builder() {
 //noinspection unchecked
@@ -204,7 +205,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
 }
 
 public Builder(int size) {
-this.map = new IntObjectOpenHashMap<>(size);
+this.map = new IntObjectHashMap<>(size);
 }
 
 public Builder(ImmutableOpenIntMap<VType> map) {
@@ -215,7 +216,7 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
 * Builds a new instance of the
 */
 public ImmutableOpenIntMap<VType> build() {
-IntObjectOpenHashMap<VType> map = this.map;
+IntObjectHashMap<VType> map = this.map;
 this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest)
 return new ImmutableOpenIntMap<>(map);
 }
@@ -325,5 +326,50 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
 public ObjectContainer<VType> values() {
 return map.values();
 }
+
+@Override
+public int removeAll(IntObjectPredicate<? super VType> predicate) {
+return map.removeAll(predicate);
+}
+
+@Override
+public <T extends IntObjectPredicate<? super VType>> T forEach(T predicate) {
+return map.forEach(predicate);
+}
+
+@Override
+public int indexOf(int key) {
+return map.indexOf(key);
+}
+
+@Override
+public boolean indexExists(int index) {
+return map.indexExists(index);
+}
+
+@Override
+public VType indexGet(int index) {
+return map.indexGet(index);
+}
+
+@Override
+public VType indexReplace(int index, VType newValue) {
+return map.indexReplace(index, newValue);
+}
+
+@Override
+public void indexInsert(int index, int key, VType value) {
+map.indexInsert(index, key, value);
+}
+
+@Override
+public void release() {
+map.release();
+}
+
+@Override
+public String visualizeKeyDistribution(int characters) {
+return map.visualizeKeyDistribution(characters);
+}
 }
 }
@@ -23,6 +23,8 @@ import com.carrotsearch.hppc.*;
 import com.carrotsearch.hppc.cursors.LongCursor;
 import com.carrotsearch.hppc.cursors.LongObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
+import com.carrotsearch.hppc.predicates.IntObjectPredicate;
+import com.carrotsearch.hppc.predicates.LongObjectPredicate;
 import com.carrotsearch.hppc.predicates.LongPredicate;
 import com.carrotsearch.hppc.procedures.LongObjectProcedure;
 import com.google.common.collect.UnmodifiableIterator;
@@ -38,9 +40,9 @@ import java.util.Map;
 */
 public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCursor<VType>> {
 
-private final LongObjectOpenHashMap<VType> map;
+private final LongObjectHashMap<VType> map;
 
-private ImmutableOpenLongMap(LongObjectOpenHashMap<VType> map) {
+private ImmutableOpenLongMap(LongObjectHashMap<VType> map) {
 this.map = map;
 }
 
@@ -175,7 +177,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
 }
 
 @SuppressWarnings("unchecked")
-private static final ImmutableOpenLongMap EMPTY = new ImmutableOpenLongMap(new LongObjectOpenHashMap());
+private static final ImmutableOpenLongMap EMPTY = new ImmutableOpenLongMap(new LongObjectHashMap());
 
 @SuppressWarnings("unchecked")
 public static <VType> ImmutableOpenLongMap<VType> of() {
@@ -196,7 +198,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
 
 public static class Builder<VType> implements LongObjectMap<VType> {
 
-private LongObjectOpenHashMap<VType> map;
+private LongObjectHashMap<VType> map;
 
 public Builder() {
 //noinspection unchecked
@@ -204,7 +206,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
 }
 
 public Builder(int size) {
-this.map = new LongObjectOpenHashMap<>(size);
+this.map = new LongObjectHashMap<>(size);
 }
 
 public Builder(ImmutableOpenLongMap<VType> map) {
@@ -215,7 +217,7 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
 * Builds a new instance of the
 */
 public ImmutableOpenLongMap<VType> build() {
-LongObjectOpenHashMap<VType> map = this.map;
+LongObjectHashMap<VType> map = this.map;
 this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest)
 return new ImmutableOpenLongMap<>(map);
 }
@@ -311,11 +313,6 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
 return map.removeAll(predicate);
 }
 
-@Override
-public <T extends LongObjectProcedure<? super VType>> T forEach(T procedure) {
-return map.forEach(procedure);
-}
-
 @Override
 public LongCollection keys() {
 return map.keys();
@@ -325,5 +322,55 @@ public final class ImmutableOpenLongMap<VType> implements Iterable<LongObjectCur
 public ObjectContainer<VType> values() {
 return map.values();
 }
+
+@Override
+public <T extends LongObjectProcedure<? super VType>> T forEach(T procedure) {
+return map.forEach(procedure);
+}
+
+@Override
+public int indexOf(long key) {
+return map.indexOf(key);
+}
+
+@Override
+public boolean indexExists(int index) {
+return map.indexExists(index);
+}
+
+@Override
+public VType indexGet(int index) {
+return map.indexGet(index);
+}
+
+@Override
+public VType indexReplace(int index, VType newValue) {
+return map.indexReplace(index, newValue);
+}
+
+@Override
+public void indexInsert(int index, long key, VType value) {
+map.indexInsert(index, key, value);
+}
+
+@Override
+public void release() {
+map.release();
+}
+
+@Override
+public String visualizeKeyDistribution(int characters) {
+return map.visualizeKeyDistribution(characters);
+}
+
+@Override
+public int removeAll(LongObjectPredicate<? super VType> predicate) {
+return map.removeAll(predicate);
+}
+
+@Override
+public <T extends LongObjectPredicate<? super VType>> T forEach(T predicate) {
+return map.forEach(predicate);
+}
 }
 }
@@ -22,6 +22,7 @@ package org.elasticsearch.common.collect;
 import com.carrotsearch.hppc.*;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+import com.carrotsearch.hppc.predicates.ObjectObjectPredicate;
 import com.carrotsearch.hppc.predicates.ObjectPredicate;
 import com.carrotsearch.hppc.procedures.ObjectObjectProcedure;
 import com.google.common.collect.UnmodifiableIterator;
@@ -37,9 +38,9 @@ import java.util.Map;
 */
 public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObjectCursor<KType, VType>> {
 
-private final ObjectObjectOpenHashMap<KType, VType> map;
+private final ObjectObjectHashMap<KType, VType> map;
 
-private ImmutableOpenMap(ObjectObjectOpenHashMap<KType, VType> map) {
+private ImmutableOpenMap(ObjectObjectHashMap<KType, VType> map) {
 this.map = map;
 }
 
@@ -182,7 +183,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
 }
 
 @SuppressWarnings("unchecked")
-private static final ImmutableOpenMap EMPTY = new ImmutableOpenMap(new ObjectObjectOpenHashMap());
+private static final ImmutableOpenMap EMPTY = new ImmutableOpenMap(new ObjectObjectHashMap());
 
 @SuppressWarnings("unchecked")
 public static <KType, VType> ImmutableOpenMap<KType, VType> of() {
@@ -211,8 +212,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
 }
 
 public static class Builder<KType, VType> implements ObjectObjectMap<KType, VType> {
-
-private ObjectObjectOpenHashMap<KType, VType> map;
+private ObjectObjectHashMap<KType, VType> map;
 
 public Builder() {
 //noinspection unchecked
@@ -220,7 +220,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
 }
 
 public Builder(int size) {
-this.map = new ObjectObjectOpenHashMap<>(size);
+this.map = new ObjectObjectHashMap<>(size);
 }
 
 public Builder(ImmutableOpenMap<KType, VType> map) {
@@ -231,11 +231,13 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
 * Builds a new instance of the
 */
 public ImmutableOpenMap<KType, VType> build() {
-ObjectObjectOpenHashMap<KType, VType> map = this.map;
+ObjectObjectHashMap<KType, VType> map = this.map;
 this.map = null; // nullify the map, so any operation post build will fail! (hackish, but safest)
 return new ImmutableOpenMap<>(map);
 }
 
+
+
 /**
 * Puts all the entries in the map to the builder.
 */
@@ -313,7 +315,7 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
 }
 
 @Override
-public int removeAll(ObjectContainer<? extends KType> container) {
+public int removeAll(ObjectContainer<? super KType> container) {
 return map.removeAll(container);
 }
 
@@ -347,5 +349,49 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
 return (Builder) this;
 }
 
+@Override
+public int removeAll(ObjectObjectPredicate<? super KType, ? super VType> predicate) {
+return map.removeAll(predicate);
+}
+
+@Override
+public <T extends ObjectObjectPredicate<? super KType, ? super VType>> T forEach(T predicate) {
+return map.forEach(predicate);
+}
+
+@Override
+public int indexOf(KType key) {
+return map.indexOf(key);
+}
+
+@Override
+public boolean indexExists(int index) {
+return map.indexExists(index);
+}
+
+@Override
+public VType indexGet(int index) {
+return map.indexGet(index);
+}
+
+@Override
+public VType indexReplace(int index, VType newValue) {
+return map.indexReplace(index, newValue);
+}
+
+@Override
+public void indexInsert(int index, KType key, VType value) {
+map.indexInsert(index, key, value);
+}
+
+@Override
+public void release() {
+map.release();
+}
+
+@Override
+public String visualizeKeyDistribution(int characters) {
+return map.visualizeKeyDistribution(characters);
+}
 }
 }
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.lucene.search;
 
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
@@ -149,7 +149,7 @@ public class MultiPhrasePrefixQuery extends Query {
 }
 Term[] suffixTerms = termArrays.get(sizeMinus1);
 int position = positions.get(sizeMinus1);
-ObjectOpenHashSet<Term> terms = new ObjectOpenHashSet<>();
+ObjectHashSet<Term> terms = new ObjectHashSet<>();
 for (Term term : suffixTerms) {
 getPrefixTerms(terms, term, reader);
 if (terms.size() > maxExpansions) {
@@ -163,7 +163,7 @@ public class MultiPhrasePrefixQuery extends Query {
 return query.rewrite(reader);
 }
 
-private void getPrefixTerms(ObjectOpenHashSet<Term> terms, final Term prefix, final IndexReader reader) throws IOException {
+private void getPrefixTerms(ObjectHashSet<Term> terms, final Term prefix, final IndexReader reader) throws IOException {
 // SlowCompositeReaderWrapper could be used... but this would merge all terms from each segment into one terms
 // instance, which is very expensive. Therefore I think it is better to iterate over each leaf individually.
 List<LeafReaderContext> leaves = reader.leaves();
@ -19,7 +19,7 @@
|
||||||
|
|
||||||
package org.elasticsearch.common.recycler;
|
package org.elasticsearch.common.recycler;
|
||||||
|
|
||||||
import com.carrotsearch.hppc.hash.MurmurHash3;
|
import com.carrotsearch.hppc.BitMixer;
|
||||||
import com.google.common.collect.Queues;
|
import com.google.common.collect.Queues;
|
||||||
import org.elasticsearch.ElasticsearchException;
|
import org.elasticsearch.ElasticsearchException;
|
||||||
|
|
||||||
|
@ -173,7 +173,7 @@ public enum Recyclers {
|
||||||
final int slot() {
|
final int slot() {
|
||||||
final long id = Thread.currentThread().getId();
|
final long id = Thread.currentThread().getId();
|
||||||
// don't trust Thread.hashCode to have equiprobable low bits
|
// don't trust Thread.hashCode to have equiprobable low bits
|
||||||
int slot = (int) MurmurHash3.hash(id);
|
int slot = (int) BitMixer.mix64(id);
|
||||||
// make positive, otherwise % may return negative numbers
|
// make positive, otherwise % may return negative numbers
|
||||||
slot &= 0x7FFFFFFF;
|
slot &= 0x7FFFFFFF;
|
||||||
slot %= concurrencyLevel;
|
slot %= concurrencyLevel;
|
||||||
|
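Several hunks in this commit (Recyclers above, AbstractPagedHashMap, BytesRefHash and CardinalityAggregator below) swap com.carrotsearch.hppc.hash.MurmurHash3 for com.carrotsearch.hppc.BitMixer, which is where hppc 0.7 keeps its bit-mixing helpers. A minimal sketch of the substitution, with illustrative inputs rather than anything from the commit:

    import com.carrotsearch.hppc.BitMixer;

    public class BitMixerSketch {
        public static void main(String[] args) {
            long id = Thread.currentThread().getId();
            // hppc 0.6: MurmurHash3.hash(id); hppc 0.7: BitMixer.mix64(id)
            int slot = (int) BitMixer.mix64(id);
            slot &= 0x7FFFFFFF; // clear the sign bit so the modulo below can never be negative
            System.out.println(slot % 16);

            // 32-bit variant, used where an existing int hash code is rehashed
            System.out.println(BitMixer.mix32("some-key".hashCode()));
        }
    }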
@@ -19,7 +19,7 @@
 package org.elasticsearch.common.util;
 
-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
 import com.google.common.base.Preconditions;
 import org.elasticsearch.common.lease.Releasable;
 
@@ -35,7 +35,7 @@ abstract class AbstractPagedHashMap implements Releasable {
     static long hash(long value) {
         // Don't use the value directly. Under some cases eg dates, it could be that the low bits don't carry much value and we would like
         // all bits of the hash to carry as much value
-        return MurmurHash3.hash(value);
+        return BitMixer.mix64(value);
     }
 
     static long hash(double value) {

@@ -19,7 +19,8 @@
 package org.elasticsearch.common.util;
 
-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
+
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
@@ -56,7 +57,7 @@ public final class BytesRefHash extends AbstractHash {
     // BytesRef has a weak hashCode function so we try to improve it by rehashing using Murmur3
     // Feel free to remove rehashing if BytesRef gets a better hash function
     private static int rehash(int hash) {
-        return MurmurHash3.hash(hash);
+        return BitMixer.mix32(hash);
     }
 
     /**
@@ -19,8 +19,8 @@
 package org.elasticsearch.gateway;
 
-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.action.FailedNodeException;
@@ -68,7 +68,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
     }
 
     public void performStateRecovery(final GatewayStateRecoveredListener listener) throws GatewayException {
-        ObjectOpenHashSet<String> nodesIds = ObjectOpenHashSet.from(clusterService.state().nodes().masterNodes().keys());
+        ObjectHashSet<String> nodesIds = new ObjectHashSet<>(clusterService.state().nodes().masterNodes().keys());
         logger.trace("performing state recovery from {}", nodesIds);
         TransportNodesListGatewayMetaState.NodesGatewayMetaState nodesState = listGatewayMetaState.list(nodesIds.toArray(String.class), null).actionGet();
 
@@ -104,7 +104,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
             }
         }
 
-        ObjectFloatOpenHashMap<String> indices = new ObjectFloatOpenHashMap<>();
+        ObjectFloatHashMap<String> indices = new ObjectFloatHashMap<>();
         MetaData electedGlobalState = null;
         int found = 0;
         for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) {
@@ -127,10 +127,11 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
         }
         // update the global state, and clean the indices, we elect them in the next phase
         MetaData.Builder metaDataBuilder = MetaData.builder(electedGlobalState).removeAllIndices();
-        final boolean[] states = indices.allocated;
+        assert !indices.containsKey(null);
         final Object[] keys = indices.keys;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
+        for (int i = 0; i < keys.length; i++) {
+            if (keys[i] != null) {
                 String index = (String) keys[i];
                 IndexMetaData electedIndexMetaData = null;
                 int indexMetaDataCount = 0;
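The loop rewritten just above follows from a layout change in hppc 0.7: hash containers no longer expose an allocated[] state array, and an occupied slot is instead recognised by a non-null entry in the public keys[] array, which is why the assert on the absence of a null key is added. A hedged sketch of the same iteration over an illustrative map:

    import com.carrotsearch.hppc.ObjectFloatHashMap;

    public class KeysArraySketch {
        public static void main(String[] args) {
            ObjectFloatHashMap<String> indices = new ObjectFloatHashMap<>();
            indices.put("logs", 2.0f);
            indices.put("metrics", 1.0f);

            assert !indices.containsKey(null);
            final Object[] keys = indices.keys;    // hppc 0.7: empty slots hold null
            final float[] values = indices.values;
            for (int i = 0; i < keys.length; i++) {
                if (keys[i] != null) {             // hppc 0.6 checked allocated[i] instead
                    System.out.println(keys[i] + " -> " + values[i]);
                }
            }
        }
    }

Cursor iteration would also work on both versions; the raw-array walk is presumably kept here to avoid per-entry cursor objects on these paths.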
@@ -19,8 +19,8 @@
 package org.elasticsearch.gateway;
 
-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectLongHashMap;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.predicates.ObjectPredicate;
 import com.google.common.collect.Maps;
@@ -68,7 +68,7 @@ public class GatewayAllocator extends AbstractComponent {
 
     private final ConcurrentMap<ShardId, Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData>> cachedStores = ConcurrentCollections.newConcurrentMap();
 
-    private final ConcurrentMap<ShardId, ObjectLongOpenHashMap<DiscoveryNode>> cachedShardsState = ConcurrentCollections.newConcurrentMap();
+    private final ConcurrentMap<ShardId, ObjectLongHashMap<DiscoveryNode>> cachedShardsState = ConcurrentCollections.newConcurrentMap();
 
     private final TimeValue listTimeout;
 
@@ -121,16 +121,17 @@ public class GatewayAllocator extends AbstractComponent {
                 continue;
             }
 
-            ObjectLongOpenHashMap<DiscoveryNode> nodesState = buildShardStates(nodes, shard, metaData.index(shard.index()));
+            ObjectLongHashMap<DiscoveryNode> nodesState = buildShardStates(nodes, shard, metaData.index(shard.index()));
 
             int numberOfAllocationsFound = 0;
             long highestVersion = -1;
             Set<DiscoveryNode> nodesWithHighestVersion = Sets.newHashSet();
-            final boolean[] states = nodesState.allocated;
+            assert !nodesState.containsKey(null);
            final Object[] keys = nodesState.keys;
             final long[] values = nodesState.values;
-            for (int i = 0; i < states.length; i++) {
-                if (!states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] == null) {
                     continue;
                 }
 
@@ -380,13 +381,13 @@ public class GatewayAllocator extends AbstractComponent {
      * A shard on shared storage will return at least shard state 0 for all
      * nodes, indicating that the shard can be allocated to any node.
      */
-    private ObjectLongOpenHashMap<DiscoveryNode> buildShardStates(final DiscoveryNodes nodes, MutableShardRouting shard, IndexMetaData indexMetaData) {
-        ObjectLongOpenHashMap<DiscoveryNode> shardStates = cachedShardsState.get(shard.shardId());
-        ObjectOpenHashSet<String> nodeIds;
+    private ObjectLongHashMap<DiscoveryNode> buildShardStates(final DiscoveryNodes nodes, MutableShardRouting shard, IndexMetaData indexMetaData) {
+        ObjectLongHashMap<DiscoveryNode> shardStates = cachedShardsState.get(shard.shardId());
+        ObjectHashSet<String> nodeIds;
         if (shardStates == null) {
-            shardStates = new ObjectLongOpenHashMap<>();
+            shardStates = new ObjectLongHashMap<>();
             cachedShardsState.put(shard.shardId(), shardStates);
-            nodeIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
+            nodeIds = new ObjectHashSet<>(nodes.dataNodes().keys());
         } else {
             // clean nodes that have failed
             shardStates.keys().removeAll(new ObjectPredicate<DiscoveryNode>() {
@@ -395,7 +396,7 @@ public class GatewayAllocator extends AbstractComponent {
                     return !nodes.nodeExists(node.id());
                 }
             });
-            nodeIds = ObjectOpenHashSet.newInstance();
+            nodeIds = new ObjectHashSet<>();
             // we have stored cached from before, see if the nodes changed, if they have, go fetch again
             for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
                 DiscoveryNode node = cursor.value;
@@ -442,13 +443,13 @@ public class GatewayAllocator extends AbstractComponent {
 
     private Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> buildShardStores(DiscoveryNodes nodes, MutableShardRouting shard) {
         Map<DiscoveryNode, TransportNodesListShardStoreMetaData.StoreFilesMetaData> shardStores = cachedStores.get(shard.shardId());
-        ObjectOpenHashSet<String> nodesIds;
+        ObjectHashSet<String> nodesIds;
         if (shardStores == null) {
             shardStores = Maps.newHashMap();
             cachedStores.put(shard.shardId(), shardStores);
-            nodesIds = ObjectOpenHashSet.from(nodes.dataNodes().keys());
+            nodesIds = new ObjectHashSet<>(nodes.dataNodes().keys());
         } else {
-            nodesIds = ObjectOpenHashSet.newInstance();
+            nodesIds = new ObjectHashSet<>();
             // clean nodes that have failed
             for (Iterator<DiscoveryNode> it = shardStores.keySet().iterator(); it.hasNext(); ) {
                 DiscoveryNode node = it.next();
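hppc 0.7 also drops the newInstance() factory, and these allocator call sites replace the container-taking from(...) with a copy constructor (the varargs from(...) survives, as the MapperService hunk further down still uses it). A small sketch of the equivalent calls, with invented node ids:

    import com.carrotsearch.hppc.ObjectArrayList;
    import com.carrotsearch.hppc.ObjectHashSet;

    public class ConstructorSketch {
        public static void main(String[] args) {
            // hppc 0.6: ObjectOpenHashSet.newInstance()
            ObjectHashSet<String> nodeIds = new ObjectHashSet<>();

            // hppc 0.6: ObjectOpenHashSet.from(container); hppc 0.7 copies via the constructor
            ObjectArrayList<String> dataNodes = ObjectArrayList.from("node_1", "node_2");
            ObjectHashSet<String> copy = new ObjectHashSet<>(dataNodes);

            // the varargs factory still exists for small constant sets
            ObjectHashSet<String> metaFields = ObjectHashSet.from("_uid", "_id", "_type");

            System.out.println(nodeIds.size() + " " + copy.size() + " " + metaFields.size());
        }
    }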
@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;
 
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
 import com.google.common.collect.Maps;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.joda.time.format.DateTimeFormatter;
@@ -32,12 +32,12 @@ import java.util.Map;
  */
 public class NumericDateAnalyzer extends NumericAnalyzer<NumericDateTokenizer> {
 
-    private static final Map<String, IntObjectOpenHashMap<NamedAnalyzer>> globalAnalyzers = Maps.newHashMap();
+    private static final Map<String, IntObjectHashMap<NamedAnalyzer>> globalAnalyzers = Maps.newHashMap();
 
     public static synchronized NamedAnalyzer buildNamedAnalyzer(FormatDateTimeFormatter formatter, int precisionStep) {
-        IntObjectOpenHashMap<NamedAnalyzer> precisionMap = globalAnalyzers.get(formatter.format());
+        IntObjectHashMap<NamedAnalyzer> precisionMap = globalAnalyzers.get(formatter.format());
         if (precisionMap == null) {
-            precisionMap = new IntObjectOpenHashMap<>();
+            precisionMap = new IntObjectHashMap<>();
             globalAnalyzers.put(formatter.format(), precisionMap);
         }
         NamedAnalyzer namedAnalyzer = precisionMap.get(precisionStep);

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;
 
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
 
 import java.io.IOException;
 
@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericDoubleAnalyzer extends NumericAnalyzer<NumericDoubleTokenizer> {
 
-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;
 
     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_double/max", AnalyzerScope.GLOBAL, new NumericDoubleAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_double/" + i, AnalyzerScope.GLOBAL, new NumericDoubleAnalyzer(i)));

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;
 
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
 
 import java.io.IOException;
 
@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericFloatAnalyzer extends NumericAnalyzer<NumericFloatTokenizer> {
 
-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;
 
     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_float/max", AnalyzerScope.GLOBAL, new NumericFloatAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_float/" + i, AnalyzerScope.GLOBAL, new NumericFloatAnalyzer(i)));

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;
 
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
 
 import java.io.IOException;
 
@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericIntegerAnalyzer extends NumericAnalyzer<NumericIntegerTokenizer> {
 
-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;
 
     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_int/max", AnalyzerScope.GLOBAL, new NumericIntegerAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_int/" + i, AnalyzerScope.GLOBAL, new NumericIntegerAnalyzer(i)));

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.analysis;
 
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
 
 import java.io.IOException;
 
@@ -28,10 +28,10 @@ import java.io.IOException;
  */
 public class NumericLongAnalyzer extends NumericAnalyzer<NumericLongTokenizer> {
 
-    private final static IntObjectOpenHashMap<NamedAnalyzer> builtIn;
+    private final static IntObjectHashMap<NamedAnalyzer> builtIn;
 
     static {
-        builtIn = new IntObjectOpenHashMap<>();
+        builtIn = new IntObjectHashMap<>();
         builtIn.put(Integer.MAX_VALUE, new NamedAnalyzer("_long/max", AnalyzerScope.GLOBAL, new NumericLongAnalyzer(Integer.MAX_VALUE)));
         for (int i = 0; i <= 64; i += 4) {
             builtIn.put(i, new NamedAnalyzer("_long/" + i, AnalyzerScope.GLOBAL, new NumericLongAnalyzer(i)));
@@ -19,7 +19,7 @@
 package org.elasticsearch.index.fielddata;
 
-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -38,13 +38,13 @@ public class FieldDataStats implements Streamable, ToXContent {
     long memorySize;
     long evictions;
     @Nullable
-    ObjectLongOpenHashMap<String> fields;
+    ObjectLongHashMap<String> fields;
 
     public FieldDataStats() {
 
     }
 
-    public FieldDataStats(long memorySize, long evictions, @Nullable ObjectLongOpenHashMap<String> fields) {
+    public FieldDataStats(long memorySize, long evictions, @Nullable ObjectLongHashMap<String> fields) {
         this.memorySize = memorySize;
         this.evictions = evictions;
         this.fields = fields;
@@ -54,17 +54,20 @@ public class FieldDataStats implements Streamable, ToXContent {
         this.memorySize += stats.memorySize;
         this.evictions += stats.evictions;
         if (stats.fields != null) {
-            if (fields == null) fields = new ObjectLongOpenHashMap<>();
-            final boolean[] states = stats.fields.allocated;
+            if (fields == null) {
+                fields = stats.fields.clone();
+            } else {
+                assert !stats.fields.containsKey(null);
             final Object[] keys = stats.fields.keys;
             final long[] values = stats.fields.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     fields.addTo((String) keys[i], values[i]);
                 }
             }
+            }
         }
     }
 
     public long getMemorySizeInBytes() {
         return this.memorySize;
@@ -79,7 +82,7 @@ public class FieldDataStats implements Streamable, ToXContent {
     }
 
     @Nullable
-    public ObjectLongOpenHashMap<String> getFields() {
+    public ObjectLongHashMap<String> getFields() {
         return fields;
     }
 
@@ -95,7 +98,7 @@ public class FieldDataStats implements Streamable, ToXContent {
         evictions = in.readVLong();
         if (in.readBoolean()) {
             int size = in.readVInt();
-            fields = new ObjectLongOpenHashMap<>(size);
+            fields = new ObjectLongHashMap<>(size);
             for (int i = 0; i < size; i++) {
                 fields.put(in.readString(), in.readVLong());
             }
@@ -111,11 +114,11 @@ public class FieldDataStats implements Streamable, ToXContent {
         } else {
             out.writeBoolean(true);
             out.writeVInt(fields.size());
-            final boolean[] states = fields.allocated;
+            assert !fields.containsKey(null);
             final Object[] keys = fields.keys;
             final long[] values = fields.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     out.writeString((String) keys[i]);
                     out.writeVLong(values[i]);
                 }
@@ -130,11 +133,11 @@ public class FieldDataStats implements Streamable, ToXContent {
         builder.field(Fields.EVICTIONS, getEvictions());
         if (fields != null) {
             builder.startObject(Fields.FIELDS);
-            final boolean[] states = fields.allocated;
+            assert !fields.containsKey(null);
             final Object[] keys = fields.keys;
             final long[] values = fields.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     builder.startObject((String) keys[i], XContentBuilder.FieldCaseConversion.NONE);
                     builder.byteSizeField(Fields.MEMORY_SIZE_IN_BYTES, Fields.MEMORY_SIZE, values[i]);
                     builder.endObject();
@@ -19,7 +19,7 @@
 package org.elasticsearch.index.fielddata;
 
-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.metrics.CounterMetric;
@@ -50,9 +50,9 @@ public class ShardFieldData extends AbstractIndexShardComponent implements Index
     }
 
     public FieldDataStats stats(String... fields) {
-        ObjectLongOpenHashMap<String> fieldTotals = null;
+        ObjectLongHashMap<String> fieldTotals = null;
         if (fields != null && fields.length > 0) {
-            fieldTotals = new ObjectLongOpenHashMap<>();
+            fieldTotals = new ObjectLongHashMap<>();
             for (Map.Entry<String, CounterMetric> entry : perFieldTotals.entrySet()) {
                 if (Regex.simpleMatch(fields, entry.getKey())) {
                     fieldTotals.put(entry.getKey(), entry.getValue().count());
@@ -19,7 +19,7 @@
 package org.elasticsearch.index.fielddata.plain;
 
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.collect.ImmutableSortedSet;
 
@@ -132,7 +132,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
         );
         ParentChildEstimator estimator = new ParentChildEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA), termsEnum);
         TermsEnum estimatedTermsEnum = estimator.beforeLoad(null);
-        ObjectObjectOpenHashMap<String, TypeBuilder> typeBuilders = ObjectObjectOpenHashMap.newInstance();
+        ObjectObjectHashMap<String, TypeBuilder> typeBuilders = new ObjectObjectHashMap<>();
         try {
             try {
                 PostingsEnum docsEnum = null;
@@ -19,7 +19,7 @@
 package org.elasticsearch.index.mapper;
 
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -79,7 +79,7 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
 public class MapperService extends AbstractIndexComponent {
 
     public static final String DEFAULT_MAPPING = "_default_";
-    private static ObjectOpenHashSet<String> META_FIELDS = ObjectOpenHashSet.from(
+    private static ObjectHashSet<String> META_FIELDS = ObjectHashSet.from(
             "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index",
             "_size", "_timestamp", "_ttl"
     );
@@ -20,7 +20,7 @@
 package org.elasticsearch.index.mapper;
 
 import com.carrotsearch.hppc.ObjectObjectMap;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.google.common.collect.Lists;
 
 import org.apache.lucene.document.Field;
@@ -106,7 +106,7 @@ public abstract class ParseContext {
         /** Add fields so that they can later be fetched using {@link #getByKey(Object)}. */
         public void addWithKey(Object key, IndexableField field) {
             if (keyedFields == null) {
-                keyedFields = new ObjectObjectOpenHashMap<>();
+                keyedFields = new ObjectObjectHashMap<>();
             } else if (keyedFields.containsKey(key)) {
                 throw new IllegalStateException("Only one field can be stored per key");
             }

@@ -19,6 +19,7 @@
 package org.elasticsearch.index.mapper.core;
 
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.base.Function;
@@ -19,7 +19,9 @@
 package org.elasticsearch.index.mapper.core;
 
+import com.carrotsearch.hppc.DoubleHashSet;
 import com.carrotsearch.hppc.LongArrayList;
+import com.carrotsearch.hppc.LongHashSet;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.NumericTokenStream;

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.mapper.geo;
 
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.base.Objects;
 import com.google.common.collect.Iterators;
@@ -748,11 +748,11 @@ public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implement
             TYPE.freeze();
         }
 
-        private final ObjectOpenHashSet<GeoPoint> points;
+        private final ObjectHashSet<GeoPoint> points;
 
         public CustomGeoPointDocValuesField(String name, double lat, double lon) {
             super(name);
-            points = new ObjectOpenHashSet<>(2);
+            points = new ObjectHashSet<>(2);
             points.add(new GeoPoint(lat, lon));
         }
 
@@ -19,7 +19,7 @@
 package org.elasticsearch.index.query;
 
-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
 import com.google.common.collect.Lists;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
@@ -41,7 +41,7 @@ public class MultiMatchQueryBuilder extends BaseQueryBuilder implements Boostabl
     private final Object text;
 
     private final List<String> fields;
-    private ObjectFloatOpenHashMap<String> fieldsBoosts;
+    private ObjectFloatHashMap<String> fieldsBoosts;
 
     private MultiMatchQueryBuilder.Type type;
 
@@ -176,7 +176,7 @@ public class MultiMatchQueryBuilder extends BaseQueryBuilder implements Boostabl
     public MultiMatchQueryBuilder field(String field, float boost) {
         fields.add(field);
         if (fieldsBoosts == null) {
-            fieldsBoosts = new ObjectFloatOpenHashMap<>();
+            fieldsBoosts = new ObjectFloatHashMap<>();
         }
         fieldsBoosts.put(field, boost);
         return this;
@@ -336,8 +336,9 @@ public class MultiMatchQueryBuilder extends BaseQueryBuilder implements Boostabl
         builder.field("query", text);
         builder.startArray("fields");
         for (String field : fields) {
-            if (fieldsBoosts != null && fieldsBoosts.containsKey(field)) {
-                field += "^" + fieldsBoosts.lget();
+            final int keySlot;
+            if (fieldsBoosts != null && ((keySlot = fieldsBoosts.indexOf(field)) >= 0)) {
+                field += "^" + fieldsBoosts.indexGet(keySlot);
             }
             builder.value(field);
         }
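The serialization change above retires the 0.6 containsKey()/lget() pairing, which depended on hidden state from the previous lookup, in favour of the explicit slot returned by indexOf(). A rough sketch of the lookup-once pattern on an illustrative boost map:

    import com.carrotsearch.hppc.ObjectFloatHashMap;

    public class BoostLookupSketch {
        public static void main(String[] args) {
            ObjectFloatHashMap<String> fieldsBoosts = new ObjectFloatHashMap<>();
            fieldsBoosts.put("title", 2.5f);

            String field = "title";
            // hppc 0.6: if (fieldsBoosts.containsKey(field)) { field += "^" + fieldsBoosts.lget(); }
            final int keySlot = fieldsBoosts.indexOf(field);
            if (keySlot >= 0) {
                field += "^" + fieldsBoosts.indexGet(keySlot);
            }
            System.out.println(field); // title^2.5
        }
    }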
@@ -19,7 +19,7 @@
 package org.elasticsearch.index.query;
 
-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
 
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -80,7 +80,7 @@ public class QueryStringQueryBuilder extends BaseQueryBuilder implements Boostab
 
     private List<String> fields;
 
-    private ObjectFloatOpenHashMap<String> fieldsBoosts;
+    private ObjectFloatHashMap<String> fieldsBoosts;
 
     private Boolean useDisMax;
 
@@ -132,7 +132,7 @@ public class QueryStringQueryBuilder extends BaseQueryBuilder implements Boostab
         }
         fields.add(field);
         if (fieldsBoosts == null) {
-            fieldsBoosts = new ObjectFloatOpenHashMap<>();
+            fieldsBoosts = new ObjectFloatHashMap<>();
         }
         fieldsBoosts.put(field, boost);
         return this;

@@ -19,7 +19,7 @@
 package org.elasticsearch.index.query;
 
-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
 import com.google.common.collect.Lists;
 
 import org.apache.lucene.queryparser.classic.MapperQueryParser;
@@ -110,7 +110,7 @@ public class QueryStringQueryParser implements QueryParser {
                         qpSettings.fields().add(field);
                         if (fBoost != -1) {
                             if (qpSettings.boosts() == null) {
-                                qpSettings.boosts(new ObjectFloatOpenHashMap<String>());
+                                qpSettings.boosts(new ObjectFloatHashMap<String>());
                             }
                             qpSettings.boosts().put(field, fBoost);
                         }
@@ -119,7 +119,7 @@ public class QueryStringQueryParser implements QueryParser {
                             qpSettings.fields().add(fField);
                             if (fBoost != -1) {
                                 if (qpSettings.boosts() == null) {
-                                    qpSettings.boosts(new ObjectFloatOpenHashMap<String>());
+                                    qpSettings.boosts(new ObjectFloatHashMap<String>());
                                 }
                                 qpSettings.boosts().put(fField, fBoost);
                             }
@@ -19,7 +19,7 @@
 package org.elasticsearch.indices.cache.query;
 
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.ObjectSet;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
@@ -386,8 +386,8 @@ public class IndicesQueryCache extends AbstractComponent implements RemovalListe
 
     private class Reaper implements Runnable {
 
-        private final ObjectSet<CleanupKey> currentKeysToClean = ObjectOpenHashSet.newInstance();
-        private final ObjectSet<IndexShard> currentFullClean = ObjectOpenHashSet.newInstance();
+        private final ObjectSet<CleanupKey> currentKeysToClean = new ObjectHashSet<>();
+        private final ObjectSet<IndexShard> currentFullClean = new ObjectHashSet<>();
 
         private volatile boolean closed;
 

@@ -19,7 +19,7 @@
 package org.elasticsearch.indices.cluster;
 
-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.ObjectContainer;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.base.Predicate;
@@ -262,7 +262,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
         if (routingNode == null) {
             return;
         }
-        IntOpenHashSet newShardIds = new IntOpenHashSet();
+        IntHashSet newShardIds = new IntHashSet();
         for (IndexService indexService : indicesService) {
             String index = indexService.index().name();
             IndexMetaData indexMetaData = event.state().metaData().index(index);
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.percolator;
 
-import com.carrotsearch.hppc.ByteObjectOpenHashMap;
+import com.carrotsearch.hppc.ByteObjectHashMap;
 import com.google.common.collect.Lists;
 
 import org.apache.lucene.index.LeafReaderContext;
@@ -109,7 +109,7 @@ public class PercolatorService extends AbstractComponent {
     public final static String TYPE_NAME = ".percolator";
 
     private final IndicesService indicesService;
-    private final ByteObjectOpenHashMap<PercolatorType> percolatorTypes;
+    private final ByteObjectHashMap<PercolatorType> percolatorTypes;
     private final PageCacheRecycler pageCacheRecycler;
     private final BigArrays bigArrays;
     private final ClusterService clusterService;
@@ -153,7 +153,7 @@ public class PercolatorService extends AbstractComponent {
         single = new SingleDocumentPercolatorIndex(cache);
         multi = new MultiDocumentPercolatorIndex(cache);
 
-        percolatorTypes = new ByteObjectOpenHashMap<>(6);
+        percolatorTypes = new ByteObjectHashMap<>(6);
         percolatorTypes.put(countPercolator.id(), countPercolator);
         percolatorTypes.put(queryCountPercolator.id(), queryCountPercolator);
         percolatorTypes.put(matchPercolator.id(), matchPercolator);
@@ -19,7 +19,9 @@
 package org.elasticsearch.rest.action.cat;
 
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
+import com.carrotsearch.hppc.ObjectIntScatterMap;
 
 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
@@ -98,7 +100,7 @@ public class RestAllocationAction extends AbstractCatAction {
     }
 
     private Table buildTable(RestRequest request, final ClusterStateResponse state, final NodesStatsResponse stats) {
-        final ObjectIntOpenHashMap<String> allocs = new ObjectIntOpenHashMap<>();
+        final ObjectIntScatterMap<String> allocs = new ObjectIntScatterMap<>();
 
         for (ShardRouting shard : state.getState().routingTable().allShards()) {
             String nodeId = "UNASSIGNED";
@@ -115,10 +117,7 @@ public class RestAllocationAction extends AbstractCatAction {
         for (NodeStats nodeStats : stats.getNodes()) {
             DiscoveryNode node = nodeStats.getNode();
 
-            int shardCount = 0;
-            if (allocs.containsKey(node.id())) {
-                shardCount = allocs.lget();
-            }
+            int shardCount = allocs.getOrDefault(node.id(), 0);
 
             ByteSizeValue total = nodeStats.getFs().getTotal().getTotal();
             ByteSizeValue avail = nodeStats.getFs().getTotal().getAvailable();
@@ -144,16 +143,17 @@ public class RestAllocationAction extends AbstractCatAction {
             table.endRow();
         }
 
-        if (allocs.containsKey("UNASSIGNED")) {
+        final String UNASSIGNED = "UNASSIGNED";
+        if (allocs.containsKey(UNASSIGNED)) {
             table.startRow();
-            table.addCell(allocs.lget());
+            table.addCell(allocs.get(UNASSIGNED));
             table.addCell(null);
            table.addCell(null);
             table.addCell(null);
             table.addCell(null);
             table.addCell(null);
             table.addCell(null);
-            table.addCell("UNASSIGNED");
+            table.addCell(UNASSIGNED);
             table.endRow();
         }
 
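Where only a read-with-default is needed, the same migration collapses to getOrDefault(), as in the shard-count lookup above. A minimal sketch with an invented node id:

    import com.carrotsearch.hppc.ObjectIntScatterMap;

    public class GetOrDefaultSketch {
        public static void main(String[] args) {
            ObjectIntScatterMap<String> allocs = new ObjectIntScatterMap<>();
            allocs.addTo("node_1", 3);

            // hppc 0.6 needed containsKey(...) followed by lget(); hppc 0.7 does it in one call
            int shardCount = allocs.getOrDefault("node_2", 0);
            System.out.println(shardCount); // prints 0
        }
    }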
@@ -20,7 +20,7 @@
 package org.elasticsearch.rest.action.cat;
 
 import com.carrotsearch.hppc.ObjectLongMap;
-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
@@ -94,7 +94,7 @@ public class RestFielddataAction extends AbstractCatAction {
 
         // Collect all the field names so a new table can be built
         for (NodeStats ns : nodeStatses.getNodes()) {
-            ObjectLongOpenHashMap<String> fields = ns.getIndices().getFieldData().getFields();
+            ObjectLongHashMap<String> fields = ns.getIndices().getFieldData().getFields();
             nodesFields.put(ns, fields);
             if (fields != null) {
                 for (String key : fields.keys().toArray(String.class)) {

@@ -19,7 +19,7 @@
 package org.elasticsearch.search;
 
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.ObjectSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.base.Charsets;
@@ -837,7 +837,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
         public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
             final Loading defaultLoading = Loading.parse(indexMetaData.settings().get(NORMS_LOADING_KEY), Loading.LAZY);
             final MapperService mapperService = indexShard.mapperService();
-            final ObjectSet<String> warmUp = new ObjectOpenHashSet<>();
+            final ObjectSet<String> warmUp = new ObjectHashSet<>();
             for (DocumentMapper docMapper : mapperService.docMappers(false)) {
                 for (FieldMapper<?> fieldMapper : docMapper.mappers()) {
                     final String indexName = fieldMapper.names().indexName();
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.search.aggregations.bucket.nested;
 
-import com.carrotsearch.hppc.LongIntOpenHashMap;
+import com.carrotsearch.hppc.LongIntHashMap;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -77,22 +77,24 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
         } else {
             parentDocs = docIdSet.bits();
         }
-        final LongIntOpenHashMap bucketOrdToLastCollectedParentDoc = new LongIntOpenHashMap(32);
+        final LongIntHashMap bucketOrdToLastCollectedParentDoc = new LongIntHashMap(32);
         return new LeafBucketCollectorBase(sub, null) {
             @Override
             public void collect(int childDoc, long bucket) throws IOException {
                 // fast forward to retrieve the parentDoc this childDoc belongs to
                 final int parentDoc = parentDocs.nextSetBit(childDoc);
                 assert childDoc <= parentDoc && parentDoc != DocIdSetIterator.NO_MORE_DOCS;
-                if (bucketOrdToLastCollectedParentDoc.containsKey(bucket)) {
-                    int lastCollectedParentDoc = bucketOrdToLastCollectedParentDoc.lget();
+                int keySlot = bucketOrdToLastCollectedParentDoc.indexOf(bucket);
+                if (bucketOrdToLastCollectedParentDoc.indexExists(keySlot)) {
+                    int lastCollectedParentDoc = bucketOrdToLastCollectedParentDoc.indexGet(keySlot);
                     if (parentDoc > lastCollectedParentDoc) {
                         collectBucket(sub, parentDoc, bucket);
-                        bucketOrdToLastCollectedParentDoc.lset(parentDoc);
+                        bucketOrdToLastCollectedParentDoc.indexReplace(keySlot, parentDoc);
                     }
                 } else {
                     collectBucket(sub, parentDoc, bucket);
-                    bucketOrdToLastCollectedParentDoc.put(bucket, parentDoc);
+                    bucketOrdToLastCollectedParentDoc.indexInsert(keySlot, bucket, parentDoc);
                 }
             }
         };
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.search.aggregations.bucket.terms.support;
 
-import com.carrotsearch.hppc.LongOpenHashSet;
+import com.carrotsearch.hppc.LongHashSet;
 import com.carrotsearch.hppc.LongSet;
 
 import org.apache.lucene.index.RandomAccessOrds;
@@ -59,10 +59,10 @@ public class IncludeExclude {
 
         private LongFilter(int numValids, int numInvalids) {
             if (numValids > 0) {
-                valids = new LongOpenHashSet(numValids);
+                valids = new LongHashSet(numValids);
             }
             if (numInvalids > 0) {
-                invalids = new LongOpenHashSet(numInvalids);
+                invalids = new LongHashSet(numInvalids);
             }
         }
 
@@ -19,7 +19,7 @@
 package org.elasticsearch.search.aggregations.metrics.cardinality;
 
-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
 import com.google.common.base.Preconditions;
 
 import org.apache.lucene.index.LeafReaderContext;
@@ -375,7 +375,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
 
         @Override
         public long valueAt(int index) {
-            return MurmurHash3.hash(values.valueAt(index));
+            return BitMixer.mix64(values.valueAt(index));
        }
     }
 
@@ -399,7 +399,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue
 
         @Override
         public long valueAt(int index) {
-            return MurmurHash3.hash(java.lang.Double.doubleToLongBits(values.valueAt(index)));
+            return BitMixer.mix64(java.lang.Double.doubleToLongBits(values.valueAt(index)));
         }
     }
 
@@ -19,7 +19,7 @@
 package org.elasticsearch.search.builder;
 
-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
 import com.google.common.base.Charsets;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
@@ -117,7 +117,7 @@ public class SearchSourceBuilder implements ToXContent {
     private List<RescoreBuilder> rescoreBuilders;
     private Integer defaultRescoreWindowSize;
 
-    private ObjectFloatOpenHashMap<String> indexBoost = null;
+    private ObjectFloatHashMap<String> indexBoost = null;
 
     private String[] stats;
 
@@ -653,7 +653,7 @@ public class SearchSourceBuilder implements ToXContent {
      */
    public SearchSourceBuilder indexBoost(String index, float indexBoost) {
         if (this.indexBoost == null) {
-            this.indexBoost = new ObjectFloatOpenHashMap<>();
+            this.indexBoost = new ObjectFloatHashMap<>();
         }
         this.indexBoost.put(index, indexBoost);
         return this;
@@ -809,11 +809,11 @@ public class SearchSourceBuilder implements ToXContent {
 
         if (indexBoost != null) {
             builder.startObject("indices_boost");
-            final boolean[] states = indexBoost.allocated;
+            assert !indexBoost.containsKey(null);
             final Object[] keys = indexBoost.keys;
             final float[] values = indexBoost.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     builder.field((String) keys[i], values[i]);
                 }
             }
|
@@ -20,7 +20,7 @@
 package org.elasticsearch.search.controller;

 import com.carrotsearch.hppc.IntArrayList;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.google.common.collect.Lists;

 import org.apache.lucene.index.Term;

@@ -102,8 +102,8 @@ public class SearchPhaseController extends AbstractComponent {
     }

     public AggregatedDfs aggregateDfs(AtomicArray<DfsSearchResult> results) {
-        ObjectObjectOpenHashMap<Term, TermStatistics> termStatistics = HppcMaps.newNoNullKeysMap();
-        ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
+        ObjectObjectHashMap<Term, TermStatistics> termStatistics = HppcMaps.newNoNullKeysMap();
+        ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
         long aggMaxDoc = 0;
         for (AtomicArray.Entry<DfsSearchResult> lEntry : results.asList()) {
             final Term[] terms = lEntry.value.terms();

@@ -124,11 +124,12 @@ public class SearchPhaseController extends AbstractComponent {
                 }

             }
-            final boolean[] states = lEntry.value.fieldStatistics().allocated;
+            assert !lEntry.value.fieldStatistics().containsKey(null);
             final Object[] keys = lEntry.value.fieldStatistics().keys;
             final Object[] values = lEntry.value.fieldStatistics().values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     String key = (String) keys[i];
                     CollectionStatistics value = (CollectionStatistics) values[i];
                     assert key != null;

@@ -20,7 +20,9 @@
 package org.elasticsearch.search.dfs;


-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
+import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.CollectionStatistics;
 import org.apache.lucene.search.TermStatistics;

@@ -33,24 +35,24 @@ import java.io.IOException;

 public class AggregatedDfs implements Streamable {

-    private ObjectObjectOpenHashMap<Term, TermStatistics> termStatistics;
-    private ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics;
+    private ObjectObjectHashMap<Term, TermStatistics> termStatistics;
+    private ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics;
     private long maxDoc;

     private AggregatedDfs() {
     }

-    public AggregatedDfs(ObjectObjectOpenHashMap<Term, TermStatistics> termStatistics, ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics, long maxDoc) {
+    public AggregatedDfs(ObjectObjectHashMap<Term, TermStatistics> termStatistics, ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics, long maxDoc) {
         this.termStatistics = termStatistics;
         this.fieldStatistics = fieldStatistics;
         this.maxDoc = maxDoc;
     }

-    public ObjectObjectOpenHashMap<Term, TermStatistics> termStatistics() {
+    public ObjectObjectHashMap<Term, TermStatistics> termStatistics() {
         return termStatistics;
     }

-    public ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics() {
+    public ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics() {
         return fieldStatistics;
     }

@@ -82,20 +84,17 @@ public class AggregatedDfs implements Streamable {
     @Override
     public void writeTo(final StreamOutput out) throws IOException {
         out.writeVInt(termStatistics.size());
-        final boolean[] states = termStatistics.allocated;
-        final Object[] keys = termStatistics.keys;
-        final Object[] values = termStatistics.values;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
-                Term term = (Term) keys[i];
+        for (ObjectObjectCursor<Term, TermStatistics> c : termStatistics()) {
+            Term term = (Term) c.key;
             out.writeString(term.field());
             out.writeBytesRef(term.bytes());
-                TermStatistics stats = (TermStatistics) values[i];
+            TermStatistics stats = (TermStatistics) c.value;
             out.writeBytesRef(stats.term());
             out.writeVLong(stats.docFreq());
             out.writeVLong(DfsSearchResult.addOne(stats.totalTermFreq()));
         }
-        }
         DfsSearchResult.writeFieldStats(out, fieldStatistics);
         out.writeVLong(maxDoc);
     }

@@ -19,8 +19,8 @@

 package org.elasticsearch.search.dfs;

-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.collect.ImmutableMap;
 import org.apache.lucene.index.IndexReaderContext;

@@ -55,7 +55,7 @@ public class DfsPhase implements SearchPhase {

     @Override
     public void execute(SearchContext context) {
-        final ObjectOpenHashSet<Term> termsSet = new ObjectOpenHashSet<>();
+        final ObjectHashSet<Term> termsSet = new ObjectHashSet<>();
         try {
             if (!context.queryRewritten()) {
                 context.updateRewriteQuery(context.searcher().rewrite(context.query()));

@@ -75,7 +75,7 @@ public class DfsPhase implements SearchPhase {
                 termStatistics[i] = context.searcher().termStatistics(terms[i], termContext);
             }

-            ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
+            ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
             for (Term term : terms) {
                 assert term.field() != null : "field is null";
                 if (!fieldStatistics.containsKey(term.field())) {

@@ -97,9 +97,9 @@ public class DfsPhase implements SearchPhase {
     // We need to bridge to JCF world, b/c of Query#extractTerms
     private static class DelegateSet extends AbstractSet<Term> {

-        private final ObjectOpenHashSet<Term> delegate;
+        private final ObjectHashSet<Term> delegate;

-        private DelegateSet(ObjectOpenHashSet<Term> delegate) {
+        private DelegateSet(ObjectHashSet<Term> delegate) {
             this.delegate = delegate;
         }

@@ -19,7 +19,9 @@

 package org.elasticsearch.search.dfs;

-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
+import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.CollectionStatistics;
 import org.apache.lucene.search.TermStatistics;

@@ -45,7 +47,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
     private long id;
     private Term[] terms;
     private TermStatistics[] termStatistics;
-    private ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
+    private ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
     private int maxDoc;

     public DfsSearchResult() {

@@ -87,7 +89,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
         return this;
     }

-    public DfsSearchResult fieldStatistics(ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics) {
+    public DfsSearchResult fieldStatistics(ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics) {
         this.fieldStatistics = fieldStatistics;
         return this;
     }

@@ -100,7 +102,7 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
         return termStatistics;
     }

-    public ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics() {
+    public ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics() {
         return fieldStatistics;
     }

@@ -145,15 +147,12 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
         out.writeVInt(maxDoc);
     }

-    public static void writeFieldStats(StreamOutput out, ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics) throws IOException {
+    public static void writeFieldStats(StreamOutput out, ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics) throws IOException {
         out.writeVInt(fieldStatistics.size());
-        final boolean[] states = fieldStatistics.allocated;
-        Object[] keys = fieldStatistics.keys;
-        Object[] values = fieldStatistics.values;
-        for (int i = 0; i < states.length; i++) {
-            if (states[i]) {
-                out.writeString((String) keys[i]);
-                CollectionStatistics statistics = (CollectionStatistics) values[i];
+        for (ObjectObjectCursor<String, CollectionStatistics> c : fieldStatistics) {
+            out.writeString(c.key);
+            CollectionStatistics statistics = c.value;
             assert statistics.maxDoc() >= 0;
             out.writeVLong(statistics.maxDoc());
             out.writeVLong(addOne(statistics.docCount()));

@@ -161,7 +160,6 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
             out.writeVLong(addOne(statistics.sumDocFreq()));
         }
     }
-    }

     public static void writeTermStats(StreamOutput out, TermStatistics[] termStatistics) throws IOException {
         out.writeVInt(termStatistics.length);

@@ -176,11 +174,11 @@ public class DfsSearchResult extends TransportResponse implements SearchPhaseRes
         out.writeVLong(addOne(termStatistic.totalTermFreq()));
     }

-    public static ObjectObjectOpenHashMap<String, CollectionStatistics> readFieldStats(StreamInput in) throws IOException {
+    public static ObjectObjectHashMap<String, CollectionStatistics> readFieldStats(StreamInput in) throws IOException {
         return readFieldStats(in, null);
     }

-    public static ObjectObjectOpenHashMap<String, CollectionStatistics> readFieldStats(StreamInput in, ObjectObjectOpenHashMap<String, CollectionStatistics> fieldStatistics) throws IOException {
+    public static ObjectObjectHashMap<String, CollectionStatistics> readFieldStats(StreamInput in, ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics) throws IOException {
         final int numFieldStatistics = in.readVInt();
         if (fieldStatistics == null) {
             fieldStatistics = HppcMaps.newNoNullKeysMap(numFieldStatistics);

@@ -19,7 +19,7 @@

 package org.elasticsearch.search.internal;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
 import com.google.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -51,7 +51,7 @@ public class InternalSearchHits implements SearchHits {
         }

         private IdentityHashMap<SearchShardTarget, Integer> shardHandleLookup = new IdentityHashMap<>();
-        private IntObjectOpenHashMap<SearchShardTarget> handleShardLookup = new IntObjectOpenHashMap<>();
+        private IntObjectHashMap<SearchShardTarget> handleShardLookup = new IntObjectHashMap<>();
         private ShardTargetType streamShardTarget = ShardTargetType.STREAM;

         public StreamContext reset() {

@@ -65,7 +65,7 @@ public class InternalSearchHits implements SearchHits {
             return shardHandleLookup;
         }

-        public IntObjectOpenHashMap<SearchShardTarget> handleShardLookup() {
+        public IntObjectHashMap<SearchShardTarget> handleShardLookup() {
             return handleShardLookup;
         }

@@ -19,7 +19,7 @@

 package org.elasticsearch.search.suggest.completion;

-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;

 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.codecs.CodecUtil;

@@ -283,9 +283,9 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider
         @Override
         public CompletionStats stats(String... fields) {
             long sizeInBytes = 0;
-            ObjectLongOpenHashMap<String> completionFields = null;
+            ObjectLongHashMap<String> completionFields = null;
             if (fields != null && fields.length > 0) {
-                completionFields = new ObjectLongOpenHashMap<>(fields.length);
+                completionFields = new ObjectLongHashMap<>(fields.length);
             }

             for (Map.Entry<String, AnalyzingSuggestHolder> entry : lookupMap.entrySet()) {

@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.search.suggest.completion;

-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -38,12 +38,12 @@ public class CompletionStats implements Streamable, ToXContent {
     private long sizeInBytes;

     @Nullable
-    private ObjectLongOpenHashMap<String> fields;
+    private ObjectLongHashMap<String> fields;

     public CompletionStats() {
     }

-    public CompletionStats(long size, @Nullable ObjectLongOpenHashMap<String> fields) {
+    public CompletionStats(long size, @Nullable ObjectLongHashMap<String> fields) {
         this.sizeInBytes = size;
         this.fields = fields;
     }

@@ -56,7 +56,7 @@ public class CompletionStats implements Streamable, ToXContent {
         return new ByteSizeValue(sizeInBytes);
     }

-    public ObjectLongOpenHashMap<String> getFields() {
+    public ObjectLongHashMap<String> getFields() {
         return fields;
     }

@@ -65,7 +65,7 @@ public class CompletionStats implements Streamable, ToXContent {
         sizeInBytes = in.readVLong();
         if (in.readBoolean()) {
             int size = in.readVInt();
-            fields = new ObjectLongOpenHashMap<>(size);
+            fields = new ObjectLongHashMap<>(size);
             for (int i = 0; i < size; i++) {
                 fields.put(in.readString(), in.readVLong());
             }

@@ -80,11 +80,12 @@ public class CompletionStats implements Streamable, ToXContent {
         } else {
             out.writeBoolean(true);
             out.writeVInt(fields.size());
-            final boolean[] states = fields.allocated;
+            assert !fields.containsKey(null);
             final Object[] keys = fields.keys;
             final long[] values = fields.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     out.writeString((String) keys[i]);
                     out.writeVLong(values[i]);
                 }

@@ -98,11 +99,12 @@ public class CompletionStats implements Streamable, ToXContent {
         builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, sizeInBytes);
         if (fields != null) {
             builder.startObject(Fields.FIELDS);
-            final boolean[] states = fields.allocated;
+            assert !fields.containsKey(null);
             final Object[] keys = fields.keys;
             final long[] values = fields.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     builder.startObject((String) keys[i], XContentBuilder.FieldCaseConversion.NONE);
                     builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, values[i]);
                     builder.endObject();

@@ -135,16 +137,18 @@ public class CompletionStats implements Streamable, ToXContent {
         sizeInBytes += completion.getSizeInBytes();

         if (completion.fields != null) {
-            if (fields == null) fields = new ObjectLongOpenHashMap<>();
-            final boolean[] states = completion.fields.allocated;
+            if (fields == null) {
+                fields = completion.fields.clone();
+            } else {
+                assert !completion.fields.containsKey(null);
             final Object[] keys = completion.fields.keys;
             final long[] values = completion.fields.values;
-            for (int i = 0; i < states.length; i++) {
-                if (states[i]) {
+            for (int i = 0; i < keys.length; i++) {
+                if (keys[i] != null) {
                     fields.addTo((String) keys[i], values[i]);
                 }
             }
             }
         }
     }
 }
+}

@@ -19,7 +19,7 @@

 package org.elasticsearch.search.suggest.context;

-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
 import com.google.common.collect.Lists;
 import org.apache.lucene.analysis.PrefixAnalyzer.PrefixTokenFilter;
 import org.apache.lucene.analysis.TokenStream;

@@ -368,7 +368,7 @@ public class GeolocationContextMapping extends ContextMapping {
                 }
             } else if (FIELD_PRECISION.equals(fieldName)) {
                 if(parser.nextToken() == Token.START_ARRAY) {
-                    IntOpenHashSet precisions = new IntOpenHashSet();
+                    IntHashSet precisions = new IntHashSet();
                     while(parser.nextToken() != Token.END_ARRAY) {
                         precisions.add(parsePrecision(parser));
                     }

@@ -448,7 +448,7 @@ public class GeolocationContextMapping extends ContextMapping {

     public static class Builder extends ContextBuilder<GeolocationContextMapping> {

-        private IntOpenHashSet precisions = new IntOpenHashSet();
+        private IntHashSet precisions = new IntHashSet();
         private boolean neighbors; // take neighbor cell on the lowest level into account
         private HashSet<String> defaultLocations = new HashSet<>();
         private String fieldName = null;

@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.snapshots;

-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.IntSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

@@ -185,7 +185,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis
                     snapshotIndexMetaData = updateIndexSettings(snapshotIndexMetaData, request.indexSettings, request.ignoreIndexSettings);
                     // Check that the index is closed or doesn't exist
                     IndexMetaData currentIndexMetaData = currentState.metaData().index(renamedIndex);
-                    IntSet ignoreShards = new IntOpenHashSet();
+                    IntSet ignoreShards = new IntHashSet();
                     if (currentIndexMetaData == null) {
                         // Index doesn't exist - create it and start recovery
                         // Make sure that the index we are about to create has a validate name

@@ -19,7 +19,6 @@

 package org.elasticsearch.transport;

-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
 import org.elasticsearch.common.ContextHolder;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -19,7 +19,7 @@

 package org.elasticsearch.action.termvectors;

-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;

 import org.apache.lucene.analysis.payloads.PayloadHelper;
 import org.apache.lucene.document.FieldType;

@@ -493,7 +493,7 @@ public class GetTermVectorsTests extends AbstractTermVectorsTests {

     private String createString(String[] tokens, Map<String, List<BytesRef>> payloads, int encoding, char delimiter) {
         String resultString = "";
-        ObjectIntOpenHashMap<String> payloadCounter = new ObjectIntOpenHashMap<>();
+        ObjectIntHashMap<String> payloadCounter = new ObjectIntHashMap<>();
         for (String token : tokens) {
             if (!payloadCounter.containsKey(token)) {
                 payloadCounter.putIfAbsent(token, 0);

@@ -601,12 +601,12 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
         assertThat(getResponse.getAliases().size(), equalTo(1));
         assertThat(getResponse.getAliases().get("foobar").size(), equalTo(2));
         assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
-        assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias2"));
+        assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
         assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(1), notNullValue());
-        assertThat(getResponse.getAliases().get("foobar").get(1).alias(), equalTo("alias1"));
+        assertThat(getResponse.getAliases().get("foobar").get(1).alias(), equalTo("alias2"));
         assertThat(getResponse.getAliases().get("foobar").get(1).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(1).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(1).getSearchRouting(), nullValue());

@@ -19,10 +19,10 @@

 package org.elasticsearch.benchmark.hppc;

-import com.carrotsearch.hppc.IntIntOpenHashMap;
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import org.elasticsearch.common.StopWatch;
 import org.elasticsearch.common.unit.SizeValue;

@@ -31,6 +31,12 @@ import java.util.HashMap;
 import java.util.IdentityHashMap;
 import java.util.concurrent.ThreadLocalRandom;

+// TODO: these benchmarks aren't too good and may be easily skewed by jit doing
+// escape analysis/ side-effects/ local
+// optimisations. Proper benchmarks with JMH (bulk ops, single-shot mode)
+// should be better here.
+// https://github.com/carrotsearch/hppc/blob/master/hppc-benchmarks/src/main/java/com/carrotsearch/hppc/benchmarks/B003_HashSet_Contains.java

 public class StringMapAdjustOrPutBenchmark {

     public static void main(String[] args) {

@@ -50,12 +56,12 @@ public class StringMapAdjustOrPutBenchmark {
         StopWatch stopWatch;

         stopWatch = new StopWatch().start();
-        ObjectIntOpenHashMap<String> map = new ObjectIntOpenHashMap<>();
+        ObjectIntHashMap<String> map = new ObjectIntHashMap<>();
         for (long iter = 0; iter < ITERATIONS; iter++) {
             if (REUSE) {
                 map.clear();
             } else {
-                map = new ObjectIntOpenHashMap<>();
+                map = new ObjectIntHashMap<>();
             }
             for (long i = 0; i < PUT_OPERATIONS; i++) {
                 map.addTo(values[(int) (i % NUMBER_OF_KEYS)], 1);

@@ -69,12 +75,12 @@ public class StringMapAdjustOrPutBenchmark {

         stopWatch = new StopWatch().start();
         // TObjectIntCustomHashMap<String> iMap = new TObjectIntCustomHashMap<String>(new StringIdentityHashingStrategy());
-        ObjectIntOpenHashMap<String> iMap = new ObjectIntOpenHashMap<>();
+        ObjectIntHashMap<String> iMap = new ObjectIntHashMap<>();
         for (long iter = 0; iter < ITERATIONS; iter++) {
             if (REUSE) {
                 iMap.clear();
             } else {
-                iMap = new ObjectIntOpenHashMap<>();
+                iMap = new ObjectIntHashMap<>();
             }
             for (long i = 0; i < PUT_OPERATIONS; i++) {
                 iMap.addTo(values[(int) (i % NUMBER_OF_KEYS)], 1);

@@ -86,12 +92,12 @@ public class StringMapAdjustOrPutBenchmark {
         iMap = null;

         stopWatch = new StopWatch().start();
-        iMap = new ObjectIntOpenHashMap<>();
+        iMap = new ObjectIntHashMap<>();
         for (long iter = 0; iter < ITERATIONS; iter++) {
             if (REUSE) {
                 iMap.clear();
             } else {
-                iMap = new ObjectIntOpenHashMap<>();
+                iMap = new ObjectIntHashMap<>();
             }
             for (long i = 0; i < PUT_OPERATIONS; i++) {
                 iMap.addTo(values[(int) (i % NUMBER_OF_KEYS)], 1);

@@ -104,12 +110,12 @@ public class StringMapAdjustOrPutBenchmark {

         // now test with THashMap
         stopWatch = new StopWatch().start();
-        ObjectObjectOpenHashMap<String, StringEntry> tMap = new ObjectObjectOpenHashMap<>();
+        ObjectObjectHashMap<String, StringEntry> tMap = new ObjectObjectHashMap<>();
         for (long iter = 0; iter < ITERATIONS; iter++) {
             if (REUSE) {
                 tMap.clear();
             } else {
-                tMap = new ObjectObjectOpenHashMap<>();
+                tMap = new ObjectObjectHashMap<>();
             }
             for (long i = 0; i < PUT_OPERATIONS; i++) {
                 String key = values[(int) (i % NUMBER_OF_KEYS)];

@@ -187,12 +193,12 @@ public class StringMapAdjustOrPutBenchmark {
         }

         stopWatch = new StopWatch().start();
-        IntIntOpenHashMap intMap = new IntIntOpenHashMap();
+        IntIntHashMap intMap = new IntIntHashMap();
         for (long iter = 0; iter < ITERATIONS; iter++) {
             if (REUSE) {
                 intMap.clear();
             } else {
-                intMap = new IntIntOpenHashMap();
+                intMap = new IntIntHashMap();
             }
             for (long i = 0; i < PUT_OPERATIONS; i++) {
                 int key = iValues[(int) (i % NUMBER_OF_KEYS)];

@@ -207,12 +213,12 @@ public class StringMapAdjustOrPutBenchmark {

         // now test with THashMap
         stopWatch = new StopWatch().start();
-        IntObjectOpenHashMap<IntEntry> tIntMap = new IntObjectOpenHashMap<>();
+        IntObjectHashMap<IntEntry> tIntMap = new IntObjectHashMap<>();
         for (long iter = 0; iter < ITERATIONS; iter++) {
             if (REUSE) {
                 tIntMap.clear();
             } else {
-                tIntMap = new IntObjectOpenHashMap<>();
+                tIntMap = new IntObjectHashMap<>();
             }
             for (long i = 0; i < PUT_OPERATIONS; i++) {
                 int key = iValues[(int) (i % NUMBER_OF_KEYS)];

@@ -18,8 +18,8 @@
 */
 package org.elasticsearch.benchmark.search.aggregations;

-import com.carrotsearch.hppc.IntIntOpenHashMap;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.IntIntHashMap;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;

@@ -111,7 +111,7 @@ public class GlobalOrdinalsBenchmark {
                     .endArray()
                     .endObject().endObject())
                 .get();
-        ObjectOpenHashSet<String> uniqueTerms = ObjectOpenHashSet.newInstance();
+        ObjectHashSet<String> uniqueTerms = new ObjectHashSet<>();
         for (int i = 0; i < FIELD_LIMIT; i++) {
             boolean added;
             do {

@@ -122,16 +122,11 @@ public class GlobalOrdinalsBenchmark {
         uniqueTerms = null;

         BulkRequestBuilder builder = client.prepareBulk();
-        IntIntOpenHashMap tracker = new IntIntOpenHashMap();
+        IntIntHashMap tracker = new IntIntHashMap();
         for (int i = 0; i < COUNT; i++) {
             Map<String, Object> fieldValues = new HashMap<>();
             for (int fieldSuffix = 1; fieldSuffix <= FIELD_LIMIT; fieldSuffix <<= 1) {
-                int index;
-                if (tracker.containsKey(fieldSuffix)) {
-                    index = tracker.lget();
-                } else {
-                    tracker.put(fieldSuffix, index = 0);
-                }
+                int index = tracker.putOrAdd(fieldSuffix, 0, 0);
                 if (index >= fieldSuffix) {
                     index = random.nextInt(fieldSuffix);
                     fieldValues.put("field_" + fieldSuffix, sValues[index]);

@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.benchmark.search.aggregations;

-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectScatterSet;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import com.google.common.collect.Lists;

@@ -137,7 +137,7 @@ public class SubAggregationSearchCollectModeBenchmark {
         for (int i = 0; i < NUMBER_OF_TERMS; i++) {
             lValues[i] = ThreadLocalRandom.current().nextLong();
         }
-        ObjectOpenHashSet<String> uniqueTerms = ObjectOpenHashSet.newInstance();
+        ObjectScatterSet<String> uniqueTerms = new ObjectScatterSet<>();
         for (int i = 0; i < NUMBER_OF_TERMS; i++) {
             boolean added;
             do {

@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.benchmark.search.aggregations;

-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectScatterSet;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;

@@ -96,7 +96,7 @@ public class TermsAggregationSearchAndIndexingBenchmark {
         Thread.sleep(5000);

         long startTime = System.currentTimeMillis();
-        ObjectOpenHashSet<String> uniqueTerms = ObjectOpenHashSet.newInstance();
+        ObjectScatterSet<String> uniqueTerms = new ObjectScatterSet<>();
         for (int i = 0; i < NUMBER_OF_TERMS; i++) {
             boolean added;
             do {

@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.benchmark.search.aggregations;

-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectScatterSet;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import com.google.common.collect.Lists;

@@ -28,7 +28,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.common.StopWatch;

@@ -161,7 +160,7 @@ public class TermsAggregationSearchBenchmark {
                 .endObject()
                 .endObject())).actionGet();

-        ObjectOpenHashSet<String> uniqueTerms = ObjectOpenHashSet.newInstance();
+        ObjectScatterSet<String> uniqueTerms = new ObjectScatterSet<>();
         for (int i = 0; i < NUMBER_OF_TERMS; i++) {
             boolean added;
             do {

@@ -20,7 +20,7 @@
 package org.elasticsearch.benchmark.search.child;

 import com.carrotsearch.hppc.ObjectArrayList;
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;

@@ -49,8 +49,8 @@ public class ParentChildIndexGenerator {

     public void index() {
         // Memory intensive...
-        ObjectOpenHashSet<String> usedParentIds = ObjectOpenHashSet.newInstanceWithCapacity(numParents, 0.5f);
-        ObjectArrayList<ParentDocument> parents = ObjectArrayList.newInstanceWithCapacity(numParents);
+        ObjectHashSet<String> usedParentIds = new ObjectHashSet<>(numParents, 0.5d);
+        ObjectArrayList<ParentDocument> parents = new ObjectArrayList<>(numParents);

         for (int i = 0; i < numParents; i++) {
             String parentId;

@@ -19,9 +19,8 @@

 package org.elasticsearch.cluster.allocation;

-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.google.common.base.Predicate;
-import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;

@@ -32,7 +31,6 @@ import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.discovery.zen.ZenDiscovery;
 import org.elasticsearch.discovery.zen.elect.ElectMasterService;
 import org.elasticsearch.test.ElasticsearchIntegrationTest;
 import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;

@@ -95,7 +93,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
         logger.info("--> checking current state");
         ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
         // verify that we have all the primaries on node3
-        ObjectIntOpenHashMap<String> counts = new ObjectIntOpenHashMap<>();
+        ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();
         for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
             for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                 for (ShardRouting shardRouting : indexShardRoutingTable) {

@@ -133,7 +131,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
         ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("4").setWaitForRelocatingShards(0).execute().actionGet();
         assertThat(health.isTimedOut(), equalTo(false));
         ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
-        ObjectIntOpenHashMap<String> counts = new ObjectIntOpenHashMap<>();
+        ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();

         for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
             for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {

@@ -169,7 +167,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
         ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2").setWaitForRelocatingShards(0).execute().actionGet();
         assertThat(health.isTimedOut(), equalTo(false));
         ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
-        ObjectIntOpenHashMap<String> counts = new ObjectIntOpenHashMap<>();
+        ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();

         for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
             for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {

@@ -191,7 +189,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
         assertThat(health.isTimedOut(), equalTo(false));
         clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();

-        counts = new ObjectIntOpenHashMap<>();
+        counts = new ObjectIntHashMap<>();

         for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
             for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {

@@ -213,7 +211,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
         assertThat(health.isTimedOut(), equalTo(false));
         clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();

-        counts = new ObjectIntOpenHashMap<>();
+        counts = new ObjectIntHashMap<>();

         for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
             for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {

@@ -234,7 +232,7 @@ public class AwarenessAllocationTests extends ElasticsearchIntegrationTest {
         assertThat(health.isTimedOut(), equalTo(false));
         clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();

-        counts = new ObjectIntOpenHashMap<>();
+        counts = new ObjectIntHashMap<>();

         for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
             for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {

@@ -30,6 +30,7 @@ import org.elasticsearch.indices.IndexMissingException;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;
 
+import java.util.Arrays;
 import java.util.HashSet;
 
 import static com.google.common.collect.Sets.newHashSet;

@@ -67,9 +68,8 @@ public class MetaDataTests extends ElasticsearchTestCase {
 assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar"));
 
 results = md.concreteIndices(options, "foofoobar");
-assertEquals(2, results.length);
-assertEquals("foo", results[0]);
-assertEquals("foobar", results[1]);
+assertEquals(new HashSet<>(Arrays.asList("foo", "foobar")),
+        new HashSet<>(Arrays.asList(results)));
 
 try {
 md.concreteIndices(options, "bar");

@@ -151,8 +151,8 @@ public class MetaDataTests extends ElasticsearchTestCase {
 
 results = md.concreteIndices(options, "foofoobar");
 assertEquals(2, results.length);
-assertEquals("foo", results[0]);
-assertEquals("foobar", results[1]);
+assertEquals(new HashSet<>(Arrays.asList("foo", "foobar")),
+        new HashSet<>(Arrays.asList(results)));
 
 results = md.concreteIndices(options, "foo", "bar");
 assertEquals(1, results.length);
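The two MetaDataTests hunks replace positional assertions with a set comparison so the test no longer depends on the order in which concreteIndices() returns names. A small self-contained illustration of the same assertion style (the array literal is invented for the example):

    import java.util.Arrays;
    import java.util.HashSet;
    import static org.junit.Assert.assertEquals;

    public class OrderInsensitiveAssertSketch {
        public static void main(String[] args) {
            String[] results = {"foobar", "foo"}; // any order
            // Comparing HashSets ignores element order but still checks the exact contents.
            assertEquals(new HashSet<>(Arrays.asList("foo", "foobar")),
                    new HashSet<>(Arrays.asList(results)));
        }
    }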
@@ -59,6 +59,7 @@ public class ClusterSerializationTests extends ElasticsearchAllocationTestCase {
 ClusterState serializedClusterState = ClusterState.Builder.fromBytes(ClusterState.Builder.toBytes(clusterState), newNode("node1"));
 
 assertThat(serializedClusterState.getClusterName().value(), equalTo(clusterState.getClusterName().value()));
 
 assertThat(serializedClusterState.routingTable().prettyPrint(), equalTo(clusterState.routingTable().prettyPrint()));
 }

@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.common.hppc;
 
-import com.carrotsearch.hppc.ObjectOpenHashSet;
+import com.carrotsearch.hppc.ObjectHashSet;
 import org.elasticsearch.common.collect.HppcMaps;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;

@@ -35,48 +35,48 @@ public class HppcMapsTests extends ElasticsearchTestCase {
 boolean enabled = false;
 assert enabled = true;
 assumeTrue("assertions enabled", enabled);
-ObjectOpenHashSet<String> set1 = ObjectOpenHashSet.from("1", "2", "3");
-ObjectOpenHashSet<String> set2 = ObjectOpenHashSet.from("1", "2", "3");
+ObjectHashSet<String> set1 = ObjectHashSet.from("1", "2", "3");
+ObjectHashSet<String> set2 = ObjectHashSet.from("1", "2", "3");
 List<String> values = toList(HppcMaps.intersection(set1, set2));
 assertThat(values.size(), equalTo(3));
 assertThat(values.contains("1"), equalTo(true));
 assertThat(values.contains("2"), equalTo(true));
 assertThat(values.contains("3"), equalTo(true));
 
-set1 = ObjectOpenHashSet.from("1", "2", "3");
-set2 = ObjectOpenHashSet.from("3", "4", "5");
+set1 = ObjectHashSet.from("1", "2", "3");
+set2 = ObjectHashSet.from("3", "4", "5");
 values = toList(HppcMaps.intersection(set1, set2));
 assertThat(values.size(), equalTo(1));
 assertThat(values.get(0), equalTo("3"));
 
-set1 = ObjectOpenHashSet.from("1", "2", "3");
-set2 = ObjectOpenHashSet.from("4", "5", "6");
+set1 = ObjectHashSet.from("1", "2", "3");
+set2 = ObjectHashSet.from("4", "5", "6");
 values = toList(HppcMaps.intersection(set1, set2));
 assertThat(values.size(), equalTo(0));
 
-set1 = ObjectOpenHashSet.from();
-set2 = ObjectOpenHashSet.from("3", "4", "5");
+set1 = ObjectHashSet.from();
+set2 = ObjectHashSet.from("3", "4", "5");
 values = toList(HppcMaps.intersection(set1, set2));
 assertThat(values.size(), equalTo(0));
 
-set1 = ObjectOpenHashSet.from("1", "2", "3");
-set2 = ObjectOpenHashSet.from();
+set1 = ObjectHashSet.from("1", "2", "3");
+set2 = ObjectHashSet.from();
 values = toList(HppcMaps.intersection(set1, set2));
 assertThat(values.size(), equalTo(0));
 
-set1 = ObjectOpenHashSet.from();
-set2 = ObjectOpenHashSet.from();
+set1 = ObjectHashSet.from();
+set2 = ObjectHashSet.from();
 values = toList(HppcMaps.intersection(set1, set2));
 assertThat(values.size(), equalTo(0));
 
 set1 = null;
-set2 = ObjectOpenHashSet.from();
+set2 = ObjectHashSet.from();
 try {
 toList(HppcMaps.intersection(set1, set2));
 fail();
 } catch (AssertionError e) {}
 
-set1 = ObjectOpenHashSet.from();
+set1 = ObjectHashSet.from();
 set2 = null;
 try {
 toList(HppcMaps.intersection(set1, set2));
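hppc 0.7 drops the "Open" infix from the hash container names (ObjectOpenHashSet becomes ObjectHashSet, ObjectIntOpenHashMap becomes ObjectIntHashMap, and so on), which is why most of this change set is mechanical import and constructor renames. A minimal before/after sketch, assuming hppc 0.7.1 on the classpath:

    import com.carrotsearch.hppc.ObjectHashSet;

    public class RenameSketch {
        public static void main(String[] args) {
            // 0.6.x: ObjectOpenHashSet<String> set = ObjectOpenHashSet.from("1", "2", "3");
            // 0.7.x: same from() factory, shorter class name.
            ObjectHashSet<String> set = ObjectHashSet.from("1", "2", "3");
            System.out.println(set.contains("2")); // true
        }
    }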
@@ -20,7 +20,7 @@
 package org.elasticsearch.common.util;
 
 import com.carrotsearch.hppc.ObjectLongMap;
-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import com.carrotsearch.hppc.cursors.ObjectLongCursor;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;

@@ -56,7 +56,7 @@ public class BytesRefHashTests extends ElasticsearchSingleNodeTest {
 for (int i = 0; i < values.length; ++i) {
 values[i] = new BytesRef(randomAsciiOfLength(5));
 }
-final ObjectLongMap<BytesRef> valueToId = new ObjectLongOpenHashMap<>();
+final ObjectLongMap<BytesRef> valueToId = new ObjectLongHashMap<>();
 final BytesRef[] idToValue = new BytesRef[values.length];
 final int iters = randomInt(1000000);
 for (int i = 0; i < iters; ++i) {

@@ -19,8 +19,8 @@
 
 package org.elasticsearch.common.util;
 
+import com.carrotsearch.hppc.LongLongHashMap;
 import com.carrotsearch.hppc.LongLongMap;
-import com.carrotsearch.hppc.LongLongOpenHashMap;
 import com.carrotsearch.hppc.cursors.LongLongCursor;
 import org.elasticsearch.test.ElasticsearchSingleNodeTest;
 import org.junit.Test;

@@ -52,7 +52,7 @@ public class LongHashTests extends ElasticsearchSingleNodeTest {
 for (int i = 0; i < values.length; ++i) {
 values[i] = randomLong();
 }
-final LongLongMap valueToId = new LongLongOpenHashMap();
+final LongLongMap valueToId = new LongLongHashMap();
 final long[] idToValue = new long[values.length];
 final int iters = randomInt(1000000);
 for (int i = 0; i < iters; ++i) {

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.common.util;
 
-import com.carrotsearch.hppc.LongObjectOpenHashMap;
+import com.carrotsearch.hppc.LongObjectHashMap;
 import org.elasticsearch.test.ElasticsearchSingleNodeTest;
 import org.junit.Test;
 

@@ -27,7 +27,7 @@ public class LongObjectHashMapTests extends ElasticsearchSingleNodeTest {
 
 @Test
 public void duel() {
-final LongObjectOpenHashMap<Object> map1 = new LongObjectOpenHashMap<>();
+final LongObjectHashMap<Object> map1 = new LongObjectHashMap<>();
 final LongObjectPagedHashMap<Object> map2 = new LongObjectPagedHashMap<>(randomInt(42), 0.6f + randomFloat() * 0.39f, BigArraysTests.randombigArrays());
 final int maxKey = randomIntBetween(1, 10000);
 final int iters = scaledRandomIntBetween(10000, 100000);

@@ -48,7 +48,7 @@ public class LongObjectHashMapTests extends ElasticsearchSingleNodeTest {
 for (int i = 0; i <= maxKey; ++i) {
 assertSame(map1.get(i), map2.get(i));
 }
-final LongObjectOpenHashMap<Object> copy = new LongObjectOpenHashMap<>();
+final LongObjectHashMap<Object> copy = new LongObjectHashMap<>();
 for (LongObjectPagedHashMap.Cursor<Object> cursor : map2) {
 copy.put(cursor.key, cursor.value);
 }

@@ -19,8 +19,11 @@
 
 package org.elasticsearch.index.fielddata;
 
-import com.carrotsearch.hppc.DoubleOpenHashSet;
-import com.carrotsearch.hppc.LongOpenHashSet;
+import com.carrotsearch.hppc.DoubleHashSet;
+import com.carrotsearch.hppc.LongHashSet;
+import com.carrotsearch.hppc.cursors.DoubleCursor;
+import com.carrotsearch.hppc.cursors.LongCursor;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.LongField;

@@ -324,23 +327,18 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
 public abstract long nextValue(Random r);
 }
 
-private void test(List<LongOpenHashSet> values) throws Exception {
+private void test(List<LongHashSet> values) throws Exception {
 StringField id = new StringField("_id", "", Field.Store.NO);
 
 for (int i = 0; i < values.size(); ++i) {
 Document doc = new Document();
 id.setStringValue("" + i);
 doc.add(id);
-final LongOpenHashSet v = values.get(i);
-final boolean[] states = v.allocated;
-final long[] keys = v.keys;
-
-for (int j = 0; j < states.length; j++) {
-if (states[j]) {
-LongField value = new LongField("value", keys[j], Field.Store.NO);
+final LongHashSet v = values.get(i);
+for (LongCursor c : v) {
+LongField value = new LongField("value", c.value, Field.Store.NO);
 doc.add(value);
 }
-}
 writer.addDocument(doc);
 }
 writer.forceMerge(1, true);
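This hunk is more than a rename: hppc 0.7 no longer exposes the internal allocated/keys arrays, so the test now walks the set through its cursor view. A minimal sketch of the 0.7 iteration style (the values are invented for the example):

    import com.carrotsearch.hppc.LongHashSet;
    import com.carrotsearch.hppc.cursors.LongCursor;

    public class CursorIterationSketch {
        public static void main(String[] args) {
            LongHashSet values = LongHashSet.from(1L, 2L, 3L);
            // Each cursor exposes the element in c.value (and its slot in c.index);
            // this replaces scanning the removed allocated[]/keys[] arrays.
            for (LongCursor c : values) {
                System.out.println(c.value);
            }
        }
    }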
@@ -349,10 +347,10 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
 final AtomicNumericFieldData atomicFieldData = indexFieldData.load(refreshReader());
 final SortedNumericDocValues data = atomicFieldData.getLongValues();
 final SortedNumericDoubleValues doubleData = atomicFieldData.getDoubleValues();
-final LongOpenHashSet set = new LongOpenHashSet();
-final DoubleOpenHashSet doubleSet = new DoubleOpenHashSet();
+final LongHashSet set = new LongHashSet();
+final DoubleHashSet doubleSet = new DoubleHashSet();
 for (int i = 0; i < values.size(); ++i) {
-final LongOpenHashSet v = values.get(i);
+final LongHashSet v = values.get(i);
 
 data.setDocument(i);
 assertThat(data.count() > 0, equalTo(!v.isEmpty()));

@@ -367,13 +365,9 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
 }
 assertThat(set, equalTo(v));
 
-final DoubleOpenHashSet doubleV = new DoubleOpenHashSet();
-final boolean[] states = v.allocated;
-final long[] keys = v.keys;
-for (int j = 0; j < states.length; j++) {
-if (states[j]) {
-doubleV.add((double) keys[j]);
-}
+final DoubleHashSet doubleV = new DoubleHashSet();
+for (LongCursor c : v) {
+doubleV.add(c.value);
 }
 doubleSet.clear();
 doubleData.setDocument(i);

@@ -394,10 +388,10 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
 private void test(Data data) throws Exception {
 Random r = getRandom();
 final int numDocs = 1000 + r.nextInt(19000);
-final List<LongOpenHashSet> values = new ArrayList<>(numDocs);
+final List<LongHashSet> values = new ArrayList<>(numDocs);
 for (int i = 0; i < numDocs; ++i) {
 final int numValues = data.numValues(r);
-final LongOpenHashSet vals = new LongOpenHashSet(numValues);
+final LongHashSet vals = new LongHashSet(numValues);
 for (int j = 0; j < numValues; ++j) {
 vals.add(data.nextValue(r));
 }

@@ -18,8 +18,8 @@
 */
 package org.elasticsearch.index.search.child;
 
-import com.carrotsearch.hppc.IntOpenHashSet;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.IntHashSet;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;

@@ -155,10 +155,10 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
 childValues[i] = Integer.toString(i);
 }
 
-IntOpenHashSet filteredOrDeletedDocs = new IntOpenHashSet();
+IntHashSet filteredOrDeletedDocs = new IntHashSet();
 int childDocId = 0;
 int numParentDocs = scaledRandomIntBetween(1, numUniqueChildValues);
-ObjectObjectOpenHashMap<String, NavigableSet<String>> childValueToParentIds = new ObjectObjectOpenHashMap<>();
+ObjectObjectHashMap<String, NavigableSet<String>> childValueToParentIds = new ObjectObjectHashMap<>();
 for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
 boolean markParentAsDeleted = rarely();
 boolean filterMe = rarely();

@@ -194,7 +194,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
 if (!markChildAsDeleted) {
 NavigableSet<String> parentIds;
 if (childValueToParentIds.containsKey(childValue)) {
-parentIds = childValueToParentIds.lget();
+parentIds = childValueToParentIds.get(childValue);
 } else {
 childValueToParentIds.put(childValue, parentIds = new TreeSet<>());
 }

@@ -271,7 +271,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
 LeafReader slowLeafReader = SlowCompositeReaderWrapper.wrap(indexReader);
 Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
 if (terms != null) {
-NavigableSet<String> parentIds = childValueToParentIds.lget();
+NavigableSet<String> parentIds = childValueToParentIds.get(childValue);
 TermsEnum termsEnum = terms.iterator();
 PostingsEnum docsEnum = null;
 for (String id : parentIds) {
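hppc 0.7 removes the lget()/lset() "last looked-up slot" accessors, so the containsKey()-then-lget() pattern in these tests becomes containsKey() plus an ordinary get() on the same key. A small sketch of that replacement, assuming hppc 0.7.1 (map contents invented):

    import com.carrotsearch.hppc.ObjectObjectHashMap;
    import java.util.NavigableSet;
    import java.util.TreeSet;

    public class LgetReplacementSketch {
        public static void main(String[] args) {
            ObjectObjectHashMap<String, NavigableSet<String>> byValue = new ObjectObjectHashMap<>();
            byValue.put("child", new TreeSet<>());

            // 0.6.x: if (byValue.containsKey("child")) { ids = byValue.lget(); }
            // 0.7.x: lget() is gone, so re-read the key with a plain get().
            NavigableSet<String> ids = byValue.containsKey("child") ? byValue.get("child") : new TreeSet<>();
            ids.add("parent-1");
            System.out.println(ids);
        }
    }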
@@ -19,8 +19,8 @@
 package org.elasticsearch.index.search.child;
 
 import com.carrotsearch.hppc.FloatArrayList;
-import com.carrotsearch.hppc.IntOpenHashSet;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.IntHashSet;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
 
 import org.apache.lucene.analysis.MockAnalyzer;

@@ -131,11 +131,11 @@ public class ChildrenQueryTests extends AbstractChildTests {
 childValues[i] = Integer.toString(i);
 }
 
-IntOpenHashSet filteredOrDeletedDocs = new IntOpenHashSet();
+IntHashSet filteredOrDeletedDocs = new IntHashSet();
 
 int childDocId = 0;
 int numParentDocs = scaledRandomIntBetween(1, numUniqueChildValues);
-ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds = new ObjectObjectOpenHashMap<>();
+ObjectObjectHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds = new ObjectObjectHashMap<>();
 for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
 boolean markParentAsDeleted = rarely();
 boolean filterMe = rarely();

@@ -171,7 +171,7 @@ public class ChildrenQueryTests extends AbstractChildTests {
 if (!markChildAsDeleted) {
 NavigableMap<String, FloatArrayList> parentIdToChildScores;
 if (childValueToParentIds.containsKey(childValue)) {
-parentIdToChildScores = childValueToParentIds.lget();
+parentIdToChildScores = childValueToParentIds.get(childValue);
 } else {
 childValueToParentIds.put(childValue, parentIdToChildScores = new TreeMap<>());
 }

@@ -255,7 +255,7 @@ public class ChildrenQueryTests extends AbstractChildTests {
 final FloatArrayList[] scores = new FloatArrayList[slowLeafReader.maxDoc()];
 Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
 if (terms != null) {
-NavigableMap<String, FloatArrayList> parentIdToChildScores = childValueToParentIds.lget();
+NavigableMap<String, FloatArrayList> parentIdToChildScores = childValueToParentIds.get(childValue);
 TermsEnum termsEnum = terms.iterator();
 PostingsEnum docsEnum = null;
 for (Map.Entry<String, FloatArrayList> entry : parentIdToChildScores.entrySet()) {

@@ -18,8 +18,8 @@
 */
 package org.elasticsearch.index.search.child;
 
-import com.carrotsearch.hppc.IntIntOpenHashMap;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;

@@ -112,8 +112,8 @@ public class ParentConstantScoreQueryTests extends AbstractChildTests {
 
 int childDocId = 0;
 int numParentDocs = scaledRandomIntBetween(1, numUniqueParentValues);
-ObjectObjectOpenHashMap<String, NavigableSet<String>> parentValueToChildDocIds = new ObjectObjectOpenHashMap<>();
-IntIntOpenHashMap childIdToParentId = new IntIntOpenHashMap();
+ObjectObjectHashMap<String, NavigableSet<String>> parentValueToChildDocIds = new ObjectObjectHashMap<>();
+IntIntHashMap childIdToParentId = new IntIntHashMap();
 for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
 boolean markParentAsDeleted = rarely();
 String parentValue = parentValues[random().nextInt(parentValues.length)];

@@ -152,7 +152,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTests {
 if (!markParentAsDeleted) {
 NavigableSet<String> childIds;
 if (parentValueToChildDocIds.containsKey(parentValue)) {
-childIds = parentValueToChildDocIds.lget();
+childIds = parentValueToChildDocIds.get(parentValue);
 } else {
 parentValueToChildDocIds.put(parentValue, childIds = new TreeSet<>());
 }

@@ -222,7 +222,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTests {
 LeafReader slowLeafReader = SlowCompositeReaderWrapper.wrap(indexReader);
 Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
 if (terms != null) {
-NavigableSet<String> childIds = parentValueToChildDocIds.lget();
+NavigableSet<String> childIds = parentValueToChildDocIds.get(parentValue);
 TermsEnum termsEnum = terms.iterator();
 PostingsEnum docsEnum = null;
 for (String id : childIds) {

@@ -19,8 +19,8 @@
 package org.elasticsearch.index.search.child;
 
 import com.carrotsearch.hppc.FloatArrayList;
-import com.carrotsearch.hppc.IntIntOpenHashMap;
-import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
+import com.carrotsearch.hppc.ObjectObjectHashMap;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;

@@ -116,8 +116,8 @@ public class ParentQueryTests extends AbstractChildTests {
 
 int childDocId = 0;
 int numParentDocs = scaledRandomIntBetween(1, numUniqueParentValues);
-ObjectObjectOpenHashMap<String, NavigableMap<String, Float>> parentValueToChildIds = new ObjectObjectOpenHashMap<>();
-IntIntOpenHashMap childIdToParentId = new IntIntOpenHashMap();
+ObjectObjectHashMap<String, NavigableMap<String, Float>> parentValueToChildIds = new ObjectObjectHashMap<>();
+IntIntHashMap childIdToParentId = new IntIntHashMap();
 for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
 boolean markParentAsDeleted = rarely();
 String parentValue = parentValues[random().nextInt(parentValues.length)];

@@ -153,10 +153,8 @@ public class ParentQueryTests extends AbstractChildTests {
 indexWriter.addDocument(document);
 
 if (!markParentAsDeleted) {
-NavigableMap<String, Float> childIdToScore;
-if (parentValueToChildIds.containsKey(parentValue)) {
-childIdToScore = parentValueToChildIds.lget();
-} else {
+NavigableMap<String, Float> childIdToScore = parentValueToChildIds.getOrDefault(parentValue, null);
+if (childIdToScore == null) {
 parentValueToChildIds.put(parentValue, childIdToScore = new TreeMap<>());
 }
 if (!markChildAsDeleted && !filterMe) {
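The last hunk above collapses the containsKey()/lget() pair into a single getOrDefault() lookup followed by a null check, a get-or-create in one probe. A compact sketch of that shape, assuming hppc 0.7.1 (keys and values invented):

    import com.carrotsearch.hppc.ObjectObjectHashMap;
    import java.util.NavigableMap;
    import java.util.TreeMap;

    public class GetOrDefaultSketch {
        public static void main(String[] args) {
            ObjectObjectHashMap<String, NavigableMap<String, Float>> byParent = new ObjectObjectHashMap<>();

            // Single lookup: null is only possible when the key is absent,
            // so it doubles as the "not yet created" signal.
            NavigableMap<String, Float> scores = byParent.getOrDefault("parent-1", null);
            if (scores == null) {
                byParent.put("parent-1", scores = new TreeMap<>());
            }
            scores.put("child-1", 1.0f);
            System.out.println(byParent.get("parent-1"));
        }
    }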
@@ -226,7 +224,7 @@ public class ParentQueryTests extends AbstractChildTests {
 final FloatArrayList[] scores = new FloatArrayList[slowLeafReader.maxDoc()];
 Terms terms = slowLeafReader.terms(UidFieldMapper.NAME);
 if (terms != null) {
-NavigableMap<String, Float> childIdsAndScore = parentValueToChildIds.lget();
+NavigableMap<String, Float> childIdsAndScore = parentValueToChildIds.get(parentValue);
 TermsEnum termsEnum = terms.iterator();
 PostingsEnum docsEnum = null;
 for (Map.Entry<String, Float> entry : childIdsAndScore.entrySet()) {

@@ -711,27 +711,27 @@ public class IndexStatsTests extends ElasticsearchIntegrationTest {
 stats = builder.setFieldDataFields("bar").execute().actionGet();
 assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
 assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
-assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
 assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(false));
 
 stats = builder.setFieldDataFields("bar", "baz").execute().actionGet();
 assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
 assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
-assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
 assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(true));
-assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0l));
 
 stats = builder.setFieldDataFields("*").execute().actionGet();
 assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
 assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
-assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
 assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(true));
-assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().fieldData.getFields().get("baz"), greaterThan(0l));
 
 stats = builder.setFieldDataFields("*r").execute().actionGet();
 assertThat(stats.getTotal().fieldData.getMemorySizeInBytes(), greaterThan(0l));
 assertThat(stats.getTotal().fieldData.getFields().containsKey("bar"), is(true));
-assertThat(stats.getTotal().fieldData.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().fieldData.getFields().get("bar"), greaterThan(0l));
 assertThat(stats.getTotal().fieldData.getFields().containsKey("baz"), is(false));
 
 }
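The stats assertions above keep their containsKey() checks because, with hppc's primitive-valued maps, get() on a missing key returns the value type's default (0 for ObjectLongHashMap) rather than null. A tiny sketch of that behaviour, assuming hppc 0.7.1 (field names invented):

    import com.carrotsearch.hppc.ObjectLongHashMap;

    public class MissingKeyDefaultSketch {
        public static void main(String[] args) {
            ObjectLongHashMap<String> fieldSizes = new ObjectLongHashMap<>();
            fieldSizes.put("bar", 128L);

            System.out.println(fieldSizes.get("bar"));         // 128
            System.out.println(fieldSizes.get("baz"));         // 0, indistinguishable from a stored 0
            System.out.println(fieldSizes.containsKey("baz")); // false, hence the explicit check in the test
        }
    }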
@@ -758,27 +758,27 @@ public class IndexStatsTests extends ElasticsearchIntegrationTest {
 stats = builder.setCompletionFields("bar.completion").execute().actionGet();
 assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
 assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
-assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
 assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(false));
 
 stats = builder.setCompletionFields("bar.completion", "baz.completion").execute().actionGet();
 assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
 assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
-assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
 assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(true));
-assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().completion.getFields().get("baz.completion"), greaterThan(0l));
 
 stats = builder.setCompletionFields("*").execute().actionGet();
 assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
 assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
-assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
 assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(true));
-assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().completion.getFields().get("baz.completion"), greaterThan(0l));
 
 stats = builder.setCompletionFields("*r*").execute().actionGet();
 assertThat(stats.getTotal().completion.getSizeInBytes(), greaterThan(0l));
 assertThat(stats.getTotal().completion.getFields().containsKey("bar.completion"), is(true));
-assertThat(stats.getTotal().completion.getFields().lget(), greaterThan(0l));
+assertThat(stats.getTotal().completion.getFields().get("bar.completion"), greaterThan(0l));
 assertThat(stats.getTotal().completion.getFields().containsKey("baz.completion"), is(false));
 
 }

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.recovery;
 
-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.procedures.IntProcedure;
 import com.google.common.base.Predicate;
 import com.google.common.util.concurrent.ListenableFuture;

@@ -229,7 +229,7 @@ public class RelocationTests extends ElasticsearchIntegrationTest {
 for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) {
 hitIds[hit] = hit + 1;
 }
-IntOpenHashSet set = IntOpenHashSet.from(hitIds);
+IntHashSet set = IntHashSet.from(hitIds);
 for (SearchHit hit : hits.hits()) {
 int id = Integer.parseInt(hit.id());
 if (!set.remove(id)) {

@@ -20,7 +20,7 @@
 package org.elasticsearch.search.aggregations;
 
 import com.carrotsearch.hppc.IntIntMap;
-import com.carrotsearch.hppc.IntIntOpenHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
 
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;

@@ -60,7 +60,7 @@ public class CombiTests extends ElasticsearchIntegrationTest {
 
 createIndex("idx");
 IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)];
-IntIntMap values = new IntIntOpenHashMap();
+IntIntMap values = new IntIntHashMap();
 long missingValues = 0;
 for (int i = 0; i < builders.length; i++) {
 String name = "name_" + randomIntBetween(1, 10);

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.search.aggregations;
 
-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 

@@ -174,7 +174,7 @@ public class EquivalenceTests extends ElasticsearchIntegrationTest {
 final int numDocs = scaledRandomIntBetween(1000, 2000);
 final int maxNumTerms = randomIntBetween(10, 5000);
 
-final IntOpenHashSet valuesSet = new IntOpenHashSet();
+final IntHashSet valuesSet = new IntHashSet();
 cluster().wipeIndices("idx");
 prepareCreate("idx")
 .addMapping("type", jsonBuilder().startObject()

@@ -20,7 +20,7 @@
 package org.elasticsearch.search.aggregations;
 
 import com.carrotsearch.hppc.IntIntMap;
-import com.carrotsearch.hppc.IntIntOpenHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.search.aggregations.bucket.missing.Missing;

@@ -52,7 +52,7 @@ public class MetaDataTests extends ElasticsearchIntegrationTest {
 
 createIndex("idx");
 IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)];
-IntIntMap values = new IntIntOpenHashMap();
+IntIntMap values = new IntIntHashMap();
 long missingValues = 0;
 for (int i = 0; i < builders.length; i++) {
 String name = "name_" + randomIntBetween(1, 10);

@@ -18,8 +18,8 @@
 */
 package org.elasticsearch.search.aggregations.bucket;
 
+import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.ObjectIntMap;
-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
 import com.carrotsearch.hppc.cursors.ObjectIntCursor;
 
 import org.elasticsearch.action.index.IndexRequestBuilder;

@@ -81,7 +81,7 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
 
 List<IndexRequestBuilder> cities = new ArrayList<>();
 Random random = getRandom();
-expectedDocCountsForGeoHash = new ObjectIntOpenHashMap<>(numDocs * 2);
+expectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
 for (int i = 0; i < numDocs; i++) {
 //generate random point
 double lat = (180d * random.nextDouble()) - 90d;

@@ -105,7 +105,7 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
 .addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed"));
 
 cities = new ArrayList<>();
-multiValuedExpectedDocCountsForGeoHash = new ObjectIntOpenHashMap<>(numDocs * 2);
+multiValuedExpectedDocCountsForGeoHash = new ObjectIntHashMap<>(numDocs * 2);
 for (int i = 0; i < numDocs; i++) {
 final int numPoints = random.nextInt(4);
 List<String> points = new ArrayList<>();
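`new ObjectIntHashMap<>(numDocs * 2)` keeps the old call shape; as I read the 0.7 API, the int constructor argument is treated as the number of elements the map is expected to hold, so pre-sizing like this still avoids rehashing while the map is filled. A brief illustrative sketch (the element counts and key format are invented):

    import com.carrotsearch.hppc.ObjectIntHashMap;

    public class PreSizedMapSketch {
        public static void main(String[] args) {
            int expectedDocs = 1000;
            // The constructor argument is an expected element count in 0.7,
            // so the map can grow to that size without intermediate rehashing.
            ObjectIntHashMap<String> docCounts = new ObjectIntHashMap<>(expectedDocs * 2);
            for (int i = 0; i < expectedDocs; i++) {
                docCounts.addTo("geohash-" + (i % 100), 1);
            }
            System.out.println(docCounts.size()); // 100 distinct keys
        }
    }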
@@ -18,7 +18,7 @@
 */
 package org.elasticsearch.search.aggregations.bucket;
 
-import com.carrotsearch.hppc.LongOpenHashSet;
+import com.carrotsearch.hppc.LongHashSet;
 
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;

@@ -270,7 +270,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
 assertThat(histo.getName(), equalTo("histo"));
 assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
 
-LongOpenHashSet buckets = new LongOpenHashSet();
+LongHashSet buckets = new LongHashSet();
 // TODO: use diamond once JI-9019884 is fixed
 List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
 long previousCount = Long.MIN_VALUE;

@@ -300,7 +300,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
 assertThat(histo.getName(), equalTo("histo"));
 assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
 
-LongOpenHashSet buckets = new LongOpenHashSet();
+LongHashSet buckets = new LongHashSet();
 // TODO: use diamond once JI-9019884 is fixed
 List<Histogram.Bucket> histoBuckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());
 long previousCount = Long.MAX_VALUE;

@@ -407,7 +407,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
 assertThat(histo.getName(), equalTo("histo"));
 assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
 
-LongOpenHashSet visited = new LongOpenHashSet();
+LongHashSet visited = new LongHashSet();
 double previousSum = Double.NEGATIVE_INFINITY;
 // TODO: use diamond once JI-9019884 is fixed
 List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());

@@ -448,7 +448,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
 assertThat(histo.getName(), equalTo("histo"));
 assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
 
-LongOpenHashSet visited = new LongOpenHashSet();
+LongHashSet visited = new LongHashSet();
 double previousSum = Double.POSITIVE_INFINITY;
 // TODO: use diamond once JI-9019884 is fixed
 List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());

@@ -489,7 +489,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
 assertThat(histo.getName(), equalTo("histo"));
 assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
 
-LongOpenHashSet visited = new LongOpenHashSet();
+LongHashSet visited = new LongHashSet();
 double previousSum = Double.NEGATIVE_INFINITY;
 // TODO: use diamond once JI-9019884 is fixed
 List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());

@@ -530,7 +530,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
 assertThat(histo.getName(), equalTo("histo"));
 assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
 
-LongOpenHashSet visited = new LongOpenHashSet();
+LongHashSet visited = new LongHashSet();
 double previousSum = Double.POSITIVE_INFINITY;
 // TODO: use diamond once JI-9019884 is fixed
 List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());

@@ -573,7 +573,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
 assertThat(histo.getName(), equalTo("histo"));
 assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
 
-LongOpenHashSet visited = new LongOpenHashSet();
+LongHashSet visited = new LongHashSet();
 double prevMax = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
 // TODO: use diamond once JI-9019884 is fixed
 List<Histogram.Bucket> buckets = new ArrayList<Histogram.Bucket>(histo.getBuckets());

@@ -19,7 +19,7 @@
 
 package org.elasticsearch.search.aggregations.bucket;
 
-import com.carrotsearch.hppc.LongOpenHashSet;
+import com.carrotsearch.hppc.LongHashSet;
 import com.carrotsearch.hppc.LongSet;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 

@@ -67,7 +67,7 @@ public class MinDocCountTests extends AbstractTermsTests {
 cardinality = randomIntBetween(8, 30);
 final List<IndexRequestBuilder> indexRequests = new ArrayList<>();
 final Set<String> stringTerms = new HashSet<>();
-final LongSet longTerms = new LongOpenHashSet();
+final LongSet longTerms = new LongHashSet();
 final Set<String> dateTerms = new HashSet<>();
 for (int i = 0; i < cardinality; ++i) {
 String stringTerm;

@@ -19,8 +19,8 @@
 
 package org.elasticsearch.search.aggregations.metrics.cardinality;
 
-import com.carrotsearch.hppc.IntOpenHashSet;
-import com.carrotsearch.hppc.hash.MurmurHash3;
+import com.carrotsearch.hppc.BitMixer;
+import com.carrotsearch.hppc.IntHashSet;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.test.ElasticsearchTestCase;
 import org.junit.Test;

@@ -62,12 +62,12 @@ public class HyperLogLogPlusPlusTests extends ElasticsearchTestCase {
 final int numValues = randomIntBetween(1, 100000);
 final int maxValue = randomIntBetween(1, randomBoolean() ? 1000: 100000);
 final int p = randomIntBetween(14, MAX_PRECISION);
-IntOpenHashSet set = new IntOpenHashSet();
+IntHashSet set = new IntHashSet();
 HyperLogLogPlusPlus e = new HyperLogLogPlusPlus(p, BigArrays.NON_RECYCLING_INSTANCE, 1);
 for (int i = 0; i < numValues; ++i) {
 final int n = randomInt(maxValue);
 set.add(n);
-final long hash = MurmurHash3.hash((long) n);
+final long hash = BitMixer.mix64(n);
 e.collect(bucket, hash);
 if (randomInt(100) == 0) {
 //System.out.println(e.cardinality(bucket) + " <> " + set.size());
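hppc 0.7 drops the bundled com.carrotsearch.hppc.hash.MurmurHash3 helper; the test now mixes the raw value with BitMixer.mix64, which plays the same role of spreading the input bits before they are fed to HyperLogLog++. A minimal sketch of the substitution, assuming hppc 0.7.1:

    import com.carrotsearch.hppc.BitMixer;

    public class HashMixSketch {
        public static void main(String[] args) {
            int n = 42;
            // 0.6.x: long hash = MurmurHash3.hash((long) n);
            // 0.7.x: the class is gone; BitMixer provides the bit-spreading step instead.
            long hash = BitMixer.mix64(n);
            System.out.println(Long.toHexString(hash));
        }
    }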
@ -91,7 +91,7 @@ public class HyperLogLogPlusPlusTests extends ElasticsearchTestCase {
|
||||||
final int maxValue = randomIntBetween(1, randomBoolean() ? 1000: 1000000);
|
final int maxValue = randomIntBetween(1, randomBoolean() ? 1000: 1000000);
|
||||||
for (int i = 0; i < numValues; ++i) {
|
for (int i = 0; i < numValues; ++i) {
|
||||||
final int n = randomInt(maxValue);
|
final int n = randomInt(maxValue);
|
||||||
final long hash = MurmurHash3.hash((long) n);
|
final long hash = BitMixer.mix64(n);
|
||||||
single.collect(0, hash);
|
single.collect(0, hash);
|
||||||
// use a gaussian so that all instances don't collect as many hashes
|
// use a gaussian so that all instances don't collect as many hashes
|
||||||
final int index = (int) (Math.pow(randomDouble(), 2));
|
final int index = (int) (Math.pow(randomDouble(), 2));
|
||||||
|
|
|
@ -19,7 +19,7 @@
|
||||||
|
|
||||||
package org.elasticsearch.search.scroll;
|
package org.elasticsearch.search.scroll;
|
||||||
|
|
||||||
import com.carrotsearch.hppc.IntOpenHashSet;
|
import com.carrotsearch.hppc.IntHashSet;
|
||||||
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
|
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
|
||||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||||
import org.elasticsearch.action.search.SearchResponse;
|
import org.elasticsearch.action.search.SearchResponse;
|
||||||
|
@ -159,7 +159,7 @@ public class DuelScrollTests extends ElasticsearchIntegrationTest {
|
||||||
boolean unevenRouting = randomBoolean();
|
boolean unevenRouting = randomBoolean();
|
||||||
|
|
||||||
int numMissingDocs = scaledRandomIntBetween(0, numDocs / 100);
|
int numMissingDocs = scaledRandomIntBetween(0, numDocs / 100);
|
||||||
IntOpenHashSet missingDocs = new IntOpenHashSet(numMissingDocs);
|
IntHashSet missingDocs = new IntHashSet(numMissingDocs);
|
||||||
for (int i = 0; i < numMissingDocs; i++) {
|
for (int i = 0; i < numMissingDocs; i++) {
|
||||||
while (!missingDocs.add(randomInt(numDocs))) {}
|
while (!missingDocs.add(randomInt(numDocs))) {}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.search.suggest;
 
-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import com.google.common.collect.Lists;
 

@@ -751,7 +751,7 @@ public class CompletionSuggestSearchTests extends ElasticsearchIntegrationTest {
 
         // regexes
         IndicesStatsResponse regexFieldStats = client().admin().indices().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).setCompletionFields("*").get();
-        ObjectLongOpenHashMap<String> fields = regexFieldStats.getIndex(INDEX).getPrimaries().completion.getFields();
+        ObjectLongHashMap<String> fields = regexFieldStats.getIndex(INDEX).getPrimaries().completion.getFields();
         long regexSizeInBytes = fields.get(FIELD) + fields.get(otherField);
         assertThat(regexSizeInBytes, is(totalSizeInBytes));
     }
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.search.suggest.completion;
 
-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
 
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.codecs.CodecUtil;

@@ -261,9 +261,9 @@ public class AnalyzingCompletionLookupProviderV1 extends CompletionLookupProvide
     @Override
     public CompletionStats stats(String... fields) {
         long sizeInBytes = 0;
-        ObjectLongOpenHashMap<String> completionFields = null;
+        ObjectLongHashMap<String> completionFields = null;
         if (fields != null && fields.length > 0) {
-            completionFields = new ObjectLongOpenHashMap<>(fields.length);
+            completionFields = new ObjectLongHashMap<>(fields.length);
         }
 
         for (Map.Entry<String, AnalyzingSuggestHolder> entry : lookupMap.entrySet()) {
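The stats(...) hunk above only renames the map type; the primitive-long value API is unchanged. An illustrative sketch of that API with made-up field names, noting that get() on an absent key returns 0 and addTo() accumulates into an entry, creating it on first use:

import com.carrotsearch.hppc.ObjectLongHashMap;

// Sketch only: ObjectLongHashMap calls of the kind used to accumulate per-field sizes.
public class ObjectLongHashMapExample {
    public static void main(String[] args) {
        ObjectLongHashMap<String> sizes = new ObjectLongHashMap<>(2);
        sizes.addTo("title_suggest", 128L);              // entry created with value 128
        sizes.addTo("title_suggest", 64L);               // accumulated to 192
        System.out.println(sizes.get("title_suggest"));  // 192
        System.out.println(sizes.get("missing"));        // 0: default for absent keys
    }
}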
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.snapshots;
 
-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.IntSet;
 import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableList;

@@ -581,7 +581,7 @@ public class DedicatedClusterSnapshotRestoreTests extends AbstractSnapshotTests
         ensureGreen("test-idx");
         assertThat(client().prepareCount("test-idx").get().getCount(), equalTo(100L));
 
-        IntSet reusedShards = IntOpenHashSet.newInstance();
+        IntSet reusedShards = new IntHashSet();
         for (ShardRecoveryResponse response : client().admin().indices().prepareRecoveries("test-idx").get().shardResponses().get("test-idx")) {
             if (response.recoveryState().getIndex().reusedBytes() > 0) {
                 reusedShards.add(response.getShardId());
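Besides the import rename, the snapshot-restore hunk above replaces the IntOpenHashSet.newInstance() factory, which is gone in HPPC 0.7, with a plain constructor. A minimal sketch under hypothetical shard ids:

import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.IntSet;

// Sketch only: the set is constructed directly and can still be held behind the
// IntSet interface, as in the test above.
public class ReusedShardsExample {
    public static void main(String[] args) {
        IntSet reusedShards = new IntHashSet();   // replaces IntOpenHashSet.newInstance()
        reusedShards.add(0);
        reusedShards.add(2);
        System.out.println(reusedShards.size());  // 2
    }
}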
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.transport.netty;
 
-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
 import com.google.common.base.Charsets;
 import org.elasticsearch.Version;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;

@@ -156,7 +156,7 @@ public class NettyTransportMultiPortTests extends ElasticsearchTestCase {
     }
 
     private int[] getRandomPorts(int numberOfPorts) {
-        IntOpenHashSet ports = new IntOpenHashSet();
+        IntHashSet ports = new IntHashSet();
 
         for (int i = 0; i < numberOfPorts; i++) {
             int port = randomIntBetween(49152, 65535);