upgraded to new trove, major upgrade, better performance, but 2MB larger in size...

This commit is contained in:
kimchy 2011-01-30 00:05:56 +02:00
parent 0b09fd0806
commit ad4bb464e8
59 changed files with 134 additions and 303 deletions

View File

@ -19,7 +19,7 @@
package org.apache.lucene.queryParser;
import org.elasticsearch.common.trove.ExtTObjectFloatHashMap;
import org.elasticsearch.common.trove.map.hash.TObjectFloatHashMap;
import java.util.List;
@ -29,7 +29,7 @@ import java.util.List;
public class MultiFieldQueryParserSettings extends QueryParserSettings {
List<String> fields = null;
ExtTObjectFloatHashMap<String> boosts = null;
TObjectFloatHashMap<String> boosts = null;
float tieBreaker = 0.0f;
boolean useDisMax = true;
@ -41,11 +41,11 @@ public class MultiFieldQueryParserSettings extends QueryParserSettings {
this.fields = fields;
}
public ExtTObjectFloatHashMap<String> boosts() {
public TObjectFloatHashMap<String> boosts() {
return boosts;
}
public void boosts(ExtTObjectFloatHashMap<String> boosts) {
public void boosts(TObjectFloatHashMap<String> boosts) {
this.boosts = boosts;
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.common.io.stream;
import org.elasticsearch.common.trove.TIntObjectHashMap;
import org.elasticsearch.common.trove.map.hash.TIntObjectHashMap;
import java.io.IOException;

View File

@ -19,8 +19,8 @@
package org.elasticsearch.common.io.stream;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.common.trove.TObjectIntHashMap;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
import java.io.IOException;
import java.util.Arrays;
@ -37,7 +37,7 @@ public class HandlesStreamOutput extends StreamOutput {
private StreamOutput out;
private final TObjectIntHashMap<String> handles = new ExtTObjectIntHasMap<String>().defaultReturnValue(-1);
private final TObjectIntHashMap<String> handles = new TObjectIntHashMap<String>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1);
private final HandleTable identityHandles = new HandleTable(10, (float) 3.00);

View File

@ -29,7 +29,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.TIntArrayList;
import org.elasticsearch.common.trove.list.array.TIntArrayList;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;

View File

@ -19,14 +19,15 @@
package org.elasticsearch.common.lucene.versioned;
import org.elasticsearch.common.trove.ExtTIntIntHashMap;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.common.trove.map.hash.TIntIntHashMap;
import org.elasticsearch.common.util.concurrent.ThreadSafe;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* An implementation of {@link VersionedMap} based on trove {@link org.elasticsearch.common.trove.TIntIntHashMap}.
* An implementation of {@link VersionedMap} based on trove.
*
* @author kimchy (Shay Banon)
*/
@ -140,10 +141,10 @@ public class NativeVersionedMap implements VersionedMap {
private static class Segment {
final ReadWriteLock rwl = new ReentrantReadWriteLock();
final ExtTIntIntHashMap map = new ExtTIntIntHashMap();
final TIntIntHashMap map = new TIntIntHashMap(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, 0, -1);
private Segment() {
map.defaultReturnValue(-1);
}
}
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.common.transport;
import org.elasticsearch.common.trove.TIntArrayList;
import org.elasticsearch.common.trove.list.array.TIntArrayList;
import java.util.StringTokenizer;
@ -42,7 +42,7 @@ public class PortsRange {
return false;
}
});
return ports.toNativeArray();
return ports.toArray(new int[ports.size()]);
}
public boolean iterate(PortCallback callback) throws NumberFormatException {

View File

@ -19,6 +19,8 @@
package org.elasticsearch.common.trove;
import org.elasticsearch.common.trove.list.array.TIntArrayList;
/**
* @author kimchy (Shay Banon)
*/

View File

@ -1,63 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.trove;
/**
 * A {@link TIntIntHashMap} variant that lets callers choose the value
 * {@link #get(int)} reports for absent keys (the stock trove map always
 * reports 0 in that case).
 *
 * @author kimchy (Shay Banon)
 */
public class ExtTIntIntHashMap extends TIntIntHashMap {

    // Value reported by get(int) when the key is not present; trove's default is 0.
    private int missingValue = 0;

    public ExtTIntIntHashMap() {
        super();
    }

    public ExtTIntIntHashMap(int initialCapacity) {
        super(initialCapacity);
    }

    public ExtTIntIntHashMap(int initialCapacity, float loadFactor) {
        super(initialCapacity, loadFactor);
    }

    public ExtTIntIntHashMap(TIntHashingStrategy strategy) {
        super(strategy);
    }

    public ExtTIntIntHashMap(int initialCapacity, TIntHashingStrategy strategy) {
        super(initialCapacity, strategy);
    }

    public ExtTIntIntHashMap(int initialCapacity, float loadFactor, TIntHashingStrategy strategy) {
        super(initialCapacity, loadFactor, strategy);
    }

    /**
     * Sets the value reported for absent keys and returns {@code this}
     * so the call can be chained off the constructor.
     */
    public ExtTIntIntHashMap defaultReturnValue(int defaultReturnValue) {
        this.missingValue = defaultReturnValue;
        return this;
    }

    @Override public int get(int key) {
        int slot = index(key);
        if (slot < 0) {
            // Key absent: report the configured sentinel instead of trove's 0.
            return missingValue;
        }
        return _values[slot];
    }
}

View File

@ -1,61 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.trove;
/**
 * A {@link TObjectFloatHashMap} variant that lets callers choose the value
 * {@link #get(Object)} reports for absent keys (the stock trove map always
 * reports 0 in that case).
 *
 * @author kimchy (shay.banon)
 */
public class ExtTObjectFloatHashMap<T> extends TObjectFloatHashMap<T> {

    // Value reported by get(T) when the key is not present; trove's default is 0.
    private float missingValue = 0;

    public ExtTObjectFloatHashMap() {
        super();
    }

    public ExtTObjectFloatHashMap(int initialCapacity) {
        super(initialCapacity);
    }

    public ExtTObjectFloatHashMap(int initialCapacity, float loadFactor) {
        super(initialCapacity, loadFactor);
    }

    public ExtTObjectFloatHashMap(TObjectHashingStrategy<T> strategy) {
        super(strategy);
    }

    public ExtTObjectFloatHashMap(int initialCapacity, TObjectHashingStrategy<T> strategy) {
        super(initialCapacity, strategy);
    }

    public ExtTObjectFloatHashMap(int initialCapacity, float loadFactor, TObjectHashingStrategy<T> strategy) {
        super(initialCapacity, loadFactor, strategy);
    }

    /**
     * Sets the value reported for absent keys and returns {@code this}
     * so the call can be chained off the constructor.
     */
    public ExtTObjectFloatHashMap<T> defaultReturnValue(float defaultReturnValue) {
        this.missingValue = defaultReturnValue;
        return this;
    }

    @Override public float get(T key) {
        int slot = index(key);
        if (slot < 0) {
            // Key absent: report the configured sentinel instead of trove's 0.
            return missingValue;
        }
        return _values[slot];
    }
}

View File

@ -19,13 +19,13 @@
package org.elasticsearch.common.trove;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
/**
* @author kimchy (Shay Banon)
*/
public class ExtTObjectIntHasMap<T> extends TObjectIntHashMap<T> {
private int defaultReturnValue = 0;
public ExtTObjectIntHasMap() {
}
@ -37,21 +37,8 @@ public class ExtTObjectIntHasMap<T> extends TObjectIntHashMap<T> {
super(initialCapacity, loadFactor);
}
public ExtTObjectIntHasMap(TObjectHashingStrategy<T> ttObjectHashingStrategy) {
super(ttObjectHashingStrategy);
}
public ExtTObjectIntHasMap(int initialCapacity, TObjectHashingStrategy<T> ttObjectHashingStrategy) {
super(initialCapacity, ttObjectHashingStrategy);
}
public ExtTObjectIntHasMap(int initialCapacity, float loadFactor, TObjectHashingStrategy<T> ttObjectHashingStrategy) {
super(initialCapacity, loadFactor, ttObjectHashingStrategy);
}
public ExtTObjectIntHasMap<T> defaultReturnValue(int defaultReturnValue) {
this.defaultReturnValue = defaultReturnValue;
return this;
public ExtTObjectIntHasMap(int initialCapacity, float loadFactor, int noEntryValue) {
super(initialCapacity, loadFactor, noEntryValue);
}
/**
@ -61,9 +48,4 @@ public class ExtTObjectIntHasMap<T> extends TObjectIntHashMap<T> {
int index = index(key);
return index < 0 ? null : (T) _set[index];
}
@Override public final int get(T key) {
int index = index(key);
return index < 0 ? defaultReturnValue : _values[index];
}
}

View File

@ -1,36 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.trove;
import java.util.Arrays;
/**
 * Hashing strategy for {@code byte[]} keys that compares and hashes by
 * array <em>contents</em> (via {@link java.util.Arrays}) rather than by
 * array identity.
 *
 * @author kimchy (shay.banon)
 */
public class TBytesHashingStrategy implements TObjectHashingStrategy<byte[]> {

    @Override public int computeHashCode(byte[] value) {
        return Arrays.hashCode(value);
    }

    @Override public boolean equals(byte[] first, byte[] second) {
        return Arrays.equals(first, second);
    }
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.common.collect.MapMaker;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.id.IdCache;
@ -236,7 +237,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache {
}
static class TypeBuilder {
final ExtTObjectIntHasMap<BytesWrap> idToDoc = new ExtTObjectIntHasMap<BytesWrap>().defaultReturnValue(-1);
final ExtTObjectIntHasMap<BytesWrap> idToDoc = new ExtTObjectIntHasMap<BytesWrap>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1);
final ArrayList<BytesWrap> parentIdsValues = new ArrayList<BytesWrap>();
final int[] parentIdsOrdinals;

View File

@ -39,7 +39,7 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
public SimpleIdReaderTypeCache(String type, ExtTObjectIntHasMap<BytesWrap> idToDoc,
BytesWrap[] parentIdsValues, int[] parentIdsOrdinals) {
this.type = type;
this.idToDoc = idToDoc.defaultReturnValue(-1);
this.idToDoc = idToDoc;
this.idToDoc.trimToSize();
this.parentIdsValues = parentIdsValues;
this.parentIdsOrdinals = parentIdsOrdinals;

View File

@ -22,7 +22,7 @@ package org.elasticsearch.index.field.data.bytes;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.trove.TByteArrayList;
import org.elasticsearch.common.trove.list.array.TByteArrayList;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -134,11 +134,11 @@ public abstract class ByteFieldData extends NumericFieldData<ByteDocFieldData> {
}
@Override public ByteFieldData buildSingleValue(String field, int[] ordinals) {
return new SingleValueByteFieldData(field, ordinals, terms.toNativeArray());
return new SingleValueByteFieldData(field, ordinals, terms.toArray());
}
@Override public ByteFieldData buildMultiValue(String field, int[][] ordinals) {
return new MultiValueByteFieldData(field, ordinals, terms.toNativeArray());
return new MultiValueByteFieldData(field, ordinals, terms.toArray());
}
}
}

View File

@ -22,7 +22,7 @@ package org.elasticsearch.index.field.data.doubles;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.trove.TDoubleArrayList;
import org.elasticsearch.common.trove.list.array.TDoubleArrayList;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -134,11 +134,11 @@ public abstract class DoubleFieldData extends NumericFieldData<DoubleDocFieldDat
}
@Override public DoubleFieldData buildSingleValue(String field, int[] ordinals) {
return new SingleValueDoubleFieldData(field, ordinals, terms.toNativeArray());
return new SingleValueDoubleFieldData(field, ordinals, terms.toArray());
}
@Override public DoubleFieldData buildMultiValue(String field, int[][] ordinals) {
return new MultiValueDoubleFieldData(field, ordinals, terms.toNativeArray());
return new MultiValueDoubleFieldData(field, ordinals, terms.toArray());
}
}
}

View File

@ -22,7 +22,7 @@ package org.elasticsearch.index.field.data.floats;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.trove.TFloatArrayList;
import org.elasticsearch.common.trove.list.array.TFloatArrayList;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -134,11 +134,11 @@ public abstract class FloatFieldData extends NumericFieldData<FloatDocFieldData>
}
@Override public FloatFieldData buildSingleValue(String field, int[] ordinals) {
return new SingleValueFloatFieldData(field, ordinals, terms.toNativeArray());
return new SingleValueFloatFieldData(field, ordinals, terms.toArray());
}
@Override public FloatFieldData buildMultiValue(String field, int[][] ordinals) {
return new MultiValueFloatFieldData(field, ordinals, terms.toNativeArray());
return new MultiValueFloatFieldData(field, ordinals, terms.toArray());
}
}
}

View File

@ -22,7 +22,7 @@ package org.elasticsearch.index.field.data.ints;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.trove.TIntArrayList;
import org.elasticsearch.common.trove.list.array.TIntArrayList;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -134,11 +134,11 @@ public abstract class IntFieldData extends NumericFieldData<IntDocFieldData> {
}
@Override public IntFieldData buildSingleValue(String field, int[] ordinals) {
return new SingleValueIntFieldData(field, ordinals, terms.toNativeArray());
return new SingleValueIntFieldData(field, ordinals, terms.toArray());
}
@Override public IntFieldData buildMultiValue(String field, int[][] ordinals) {
return new MultiValueIntFieldData(field, ordinals, terms.toNativeArray());
return new MultiValueIntFieldData(field, ordinals, terms.toArray());
}
}
}

View File

@ -25,7 +25,7 @@ import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.joda.time.DateTimeZone;
import org.elasticsearch.common.joda.time.MutableDateTime;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TLongArrayList;
import org.elasticsearch.common.trove.list.array.TLongArrayList;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -164,11 +164,11 @@ public abstract class LongFieldData extends NumericFieldData<LongDocFieldData> {
}
@Override public LongFieldData buildSingleValue(String field, int[] ordinals) {
return new SingleValueLongFieldData(field, ordinals, terms.toNativeArray());
return new SingleValueLongFieldData(field, ordinals, terms.toArray());
}
@Override public LongFieldData buildMultiValue(String field, int[][] ordinals) {
return new MultiValueLongFieldData(field, ordinals, terms.toNativeArray());
return new MultiValueLongFieldData(field, ordinals, terms.toArray());
}
}
}

View File

@ -22,7 +22,7 @@ package org.elasticsearch.index.field.data.shorts;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.trove.TShortArrayList;
import org.elasticsearch.common.trove.list.array.TShortArrayList;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -134,11 +134,11 @@ public abstract class ShortFieldData extends NumericFieldData<ShortDocFieldData>
}
@Override public ShortFieldData buildSingleValue(String field, int[] ordinals) {
return new SingleValueShortFieldData(field, ordinals, terms.toNativeArray());
return new SingleValueShortFieldData(field, ordinals, terms.toArray());
}
@Override public ShortFieldData buildMultiValue(String field, int[][] ordinals) {
return new MultiValueShortFieldData(field, ordinals, terms.toNativeArray());
return new MultiValueShortFieldData(field, ordinals, terms.toArray());
}
}
}

View File

@ -22,7 +22,7 @@ package org.elasticsearch.index.mapper.xcontent.geo;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TDoubleArrayList;
import org.elasticsearch.common.trove.list.array.TDoubleArrayList;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.field.data.FieldData;
import org.elasticsearch.index.field.data.FieldDataType;
@ -166,11 +166,11 @@ public abstract class GeoPointFieldData extends FieldData<GeoPointDocFieldData>
}
@Override public GeoPointFieldData buildSingleValue(String field, int[] ordinals) {
return new SingleValueGeoPointFieldData(field, ordinals, lat.toNativeArray(), lon.toNativeArray());
return new SingleValueGeoPointFieldData(field, ordinals, lat.toArray(), lon.toArray());
}
@Override public GeoPointFieldData buildMultiValue(String field, int[][] ordinals) {
return new MultiValueGeoPointFieldData(field, ordinals, lat.toNativeArray(), lon.toNativeArray());
return new MultiValueGeoPointFieldData(field, ordinals, lat.toArray(), lon.toArray());
}
}
}

View File

@ -27,7 +27,7 @@ import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.BytesWrap;
import org.elasticsearch.common.lucene.search.EmptyScorer;
import org.elasticsearch.common.trove.TIntObjectHashMap;
import org.elasticsearch.common.trove.map.hash.TIntObjectHashMap;
import org.elasticsearch.search.internal.ScopePhase;
import org.elasticsearch.search.internal.SearchContext;
@ -157,7 +157,7 @@ public class TopChildrenQuery extends Query implements ScopePhase.TopDocsPhase {
this.parentDocs = new HashMap<Object, ParentDoc[]>();
for (Map.Entry<Object, TIntObjectHashMap<ParentDoc>> entry : parentDocsPerReader.entrySet()) {
ParentDoc[] values = entry.getValue().getValues(new ParentDoc[entry.getValue().size()]);
ParentDoc[] values = entry.getValue().values(new ParentDoc[entry.getValue().size()]);
Arrays.sort(values, PARENT_DOC_COMP);
parentDocs.put(entry.getKey(), values);
}

View File

@ -19,7 +19,8 @@
package org.elasticsearch.index.query.xcontent;
import org.elasticsearch.common.trove.ExtTObjectFloatHashMap;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.common.trove.map.hash.TObjectFloatHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
@ -67,7 +68,7 @@ public class QueryStringQueryBuilder extends BaseQueryBuilder {
private List<String> fields;
private ExtTObjectFloatHashMap<String> fieldsBoosts;
private TObjectFloatHashMap<String> fieldsBoosts;
private Boolean useDisMax;
@ -106,7 +107,7 @@ public class QueryStringQueryBuilder extends BaseQueryBuilder {
}
fields.add(field);
if (fieldsBoosts == null) {
fieldsBoosts = new ExtTObjectFloatHashMap<String>().defaultReturnValue(-1);
fieldsBoosts = new TObjectFloatHashMap<String>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1);
}
fieldsBoosts.put(field, boost);
return this;

View File

@ -29,7 +29,8 @@ import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.trove.ExtTObjectFloatHashMap;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.common.trove.map.hash.TObjectFloatHashMap;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
@ -102,7 +103,7 @@ public class QueryStringQueryParser extends AbstractIndexComponent implements XC
qpSettings.fields().add(field);
if (fBoost != -1) {
if (qpSettings.boosts() == null) {
qpSettings.boosts(new ExtTObjectFloatHashMap<String>().defaultReturnValue(1.0f));
qpSettings.boosts(new TObjectFloatHashMap<String>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, 1.0f));
}
qpSettings.boosts().put(field, fBoost);
}
@ -111,7 +112,7 @@ public class QueryStringQueryParser extends AbstractIndexComponent implements XC
qpSettings.fields().add(fField);
if (fBoost != -1) {
if (qpSettings.boosts() == null) {
qpSettings.boosts(new ExtTObjectFloatHashMap<String>().defaultReturnValue(1.0f));
qpSettings.boosts(new TObjectFloatHashMap<String>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, 1.0f));
}
qpSettings.boosts().put(fField, fBoost);
}

View File

@ -17,7 +17,7 @@
package org.elasticsearch.index.search.geo;
import org.elasticsearch.common.trove.TIntIntHashMap;
import org.elasticsearch.common.trove.map.hash.TIntIntHashMap;
/**
* Utilities for encoding and decoding geohashes. Based on

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.Unicode;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.io.FastByteArrayOutputStream;
import org.elasticsearch.common.trove.TObjectFloatHashMap;
import org.elasticsearch.common.trove.TObjectFloatIterator;
import org.elasticsearch.common.trove.iterator.TObjectFloatIterator;
import org.elasticsearch.common.trove.map.hash.TObjectFloatHashMap;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

View File

@ -29,7 +29,8 @@ import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.collect.Ordering;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.trove.ExtTIntArrayList;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.dfs.AggregatedDfs;
import org.elasticsearch.search.dfs.DfsSearchResult;
@ -73,7 +74,7 @@ public class SearchPhaseController {
}
public AggregatedDfs aggregateDfs(Iterable<DfsSearchResult> results) {
ExtTObjectIntHasMap<Term> dfMap = new ExtTObjectIntHasMap<Term>().defaultReturnValue(-1);
TObjectIntHashMap<Term> dfMap = new TObjectIntHashMap<Term>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, -1);
long aggMaxDoc = 0;
for (DfsSearchResult result : results) {
for (int i = 0; i < result.freqs().length; i++) {

View File

@ -24,7 +24,9 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.common.trove.TObjectIntIterator;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.common.trove.iterator.TObjectIntIterator;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
import java.io.IOException;
@ -33,7 +35,7 @@ import java.io.IOException;
*/
public class AggregatedDfs implements Streamable {
private ExtTObjectIntHasMap<Term> dfMap;
private TObjectIntHashMap<Term> dfMap;
private long maxDoc;
@ -41,12 +43,12 @@ public class AggregatedDfs implements Streamable {
}
public AggregatedDfs(ExtTObjectIntHasMap<Term> dfMap, long maxDoc) {
this.dfMap = dfMap.defaultReturnValue(-1);
public AggregatedDfs(TObjectIntHashMap<Term> dfMap, long maxDoc) {
this.dfMap = dfMap;
this.maxDoc = maxDoc;
}
public ExtTObjectIntHasMap<Term> dfMap() {
public TObjectIntHashMap<Term> dfMap() {
return dfMap;
}
@ -62,7 +64,7 @@ public class AggregatedDfs implements Streamable {
@Override public void readFrom(StreamInput in) throws IOException {
int size = in.readVInt();
dfMap = new ExtTObjectIntHasMap<Term>(size).defaultReturnValue(-1);
dfMap = new ExtTObjectIntHasMap<Term>(size, Constants.DEFAULT_LOAD_FACTOR, -1);
for (int i = 0; i < size; i++) {
dfMap.put(new Term(in.readUTF(), in.readUTF()), in.readVInt());
}

View File

@ -22,7 +22,7 @@ package org.elasticsearch.search.dfs;
import org.apache.lucene.index.Term;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.THashSet;
import org.elasticsearch.common.trove.set.hash.THashSet;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchPhase;
import org.elasticsearch.search.internal.SearchContext;

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.facet.datehistogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.joda.time.MutableDateTime;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.longs.LongFieldData;

View File

@ -28,8 +28,8 @@ import org.elasticsearch.common.joda.time.DateTimeField;
import org.elasticsearch.common.joda.time.DateTimeZone;
import org.elasticsearch.common.joda.time.MutableDateTime;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.common.trove.TObjectIntHashMap;
import org.elasticsearch.common.trove.impl.Constants;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.field.data.FieldDataType;
@ -50,7 +50,7 @@ import java.util.Map;
public class DateHistogramFacetProcessor extends AbstractComponent implements FacetProcessor {
private final ImmutableMap<String, DateFieldParser> dateFieldParsers;
private final TObjectIntHashMap<String> rounding = new ExtTObjectIntHasMap<String>().defaultReturnValue(MutableDateTime.ROUND_FLOOR);
private final TObjectIntHashMap<String> rounding = new TObjectIntHashMap<String>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, MutableDateTime.ROUND_FLOOR);
@Inject public DateHistogramFacetProcessor(Settings settings) {
super(settings);

View File

@ -22,10 +22,10 @@ package org.elasticsearch.search.facet.datehistogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongDoubleIterator;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.TLongLongIterator;
import org.elasticsearch.common.trove.iterator.TLongDoubleIterator;
import org.elasticsearch.common.trove.iterator.TLongLongIterator;
import org.elasticsearch.common.trove.map.hash.TLongDoubleHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -22,8 +22,8 @@ package org.elasticsearch.search.facet.datehistogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.TLongLongIterator;
import org.elasticsearch.common.trove.iterator.TLongLongIterator;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -21,8 +21,8 @@ package org.elasticsearch.search.facet.datehistogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.joda.time.MutableDateTime;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.map.hash.TLongDoubleHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;

View File

@ -21,8 +21,8 @@ package org.elasticsearch.search.facet.datehistogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.joda.time.MutableDateTime;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.map.hash.TLongDoubleHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.longs.LongFieldData;

View File

@ -20,8 +20,8 @@
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.map.hash.TLongDoubleHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;

View File

@ -22,10 +22,10 @@ package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongDoubleIterator;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.TLongLongIterator;
import org.elasticsearch.common.trove.iterator.TLongDoubleIterator;
import org.elasticsearch.common.trove.iterator.TLongLongIterator;
import org.elasticsearch.common.trove.map.hash.TLongDoubleHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -22,8 +22,8 @@ package org.elasticsearch.search.facet.histogram;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.TLongLongIterator;
import org.elasticsearch.common.trove.iterator.TLongLongIterator;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -20,8 +20,8 @@
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.map.hash.TLongDoubleHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;

View File

@ -20,8 +20,8 @@
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.map.hash.TLongDoubleHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;

View File

@ -20,8 +20,8 @@
package org.elasticsearch.search.facet.histogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.common.trove.map.hash.TLongDoubleHashMap;
import org.elasticsearch.common.trove.map.hash.TLongLongHashMap;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TByteIntHashMap;
import org.elasticsearch.common.trove.TByteIntIterator;
import org.elasticsearch.common.trove.iterator.TByteIntIterator;
import org.elasticsearch.common.trove.map.hash.TByteIntHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TByteIntHashMap;
import org.elasticsearch.common.trove.TByteIntIterator;
import org.elasticsearch.common.trove.iterator.TByteIntIterator;
import org.elasticsearch.common.trove.map.hash.TByteIntHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.bytes.ByteFieldData;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TDoubleIntHashMap;
import org.elasticsearch.common.trove.TDoubleIntIterator;
import org.elasticsearch.common.trove.iterator.TDoubleIntIterator;
import org.elasticsearch.common.trove.map.hash.TDoubleIntHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TDoubleIntHashMap;
import org.elasticsearch.common.trove.TDoubleIntIterator;
import org.elasticsearch.common.trove.iterator.TDoubleIntIterator;
import org.elasticsearch.common.trove.map.hash.TDoubleIntHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.doubles.DoubleFieldData;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TFloatIntHashMap;
import org.elasticsearch.common.trove.TFloatIntIterator;
import org.elasticsearch.common.trove.iterator.TFloatIntIterator;
import org.elasticsearch.common.trove.map.hash.TFloatIntHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TFloatIntHashMap;
import org.elasticsearch.common.trove.TFloatIntIterator;
import org.elasticsearch.common.trove.iterator.TFloatIntIterator;
import org.elasticsearch.common.trove.map.hash.TFloatIntHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.floats.FloatFieldData;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TIntIntHashMap;
import org.elasticsearch.common.trove.TIntIntIterator;
import org.elasticsearch.common.trove.iterator.TIntIntIterator;
import org.elasticsearch.common.trove.map.hash.TIntIntHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TIntIntHashMap;
import org.elasticsearch.common.trove.TIntIntIterator;
import org.elasticsearch.common.trove.iterator.TIntIntIterator;
import org.elasticsearch.common.trove.map.hash.TIntIntHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.ints.IntFieldData;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TLongIntHashMap;
import org.elasticsearch.common.trove.TLongIntIterator;
import org.elasticsearch.common.trove.iterator.TLongIntIterator;
import org.elasticsearch.common.trove.map.hash.TLongIntHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TLongIntHashMap;
import org.elasticsearch.common.trove.TLongIntIterator;
import org.elasticsearch.common.trove.iterator.TLongIntIterator;
import org.elasticsearch.common.trove.map.hash.TLongIntHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.longs.LongFieldData;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TShortIntHashMap;
import org.elasticsearch.common.trove.TShortIntIterator;
import org.elasticsearch.common.trove.iterator.TShortIntIterator;
import org.elasticsearch.common.trove.map.hash.TShortIntHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TShortIntHashMap;
import org.elasticsearch.common.trove.TShortIntIterator;
import org.elasticsearch.common.trove.iterator.TShortIntIterator;
import org.elasticsearch.common.trove.map.hash.TShortIntHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.shorts.ShortFieldData;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.ImmutableSet;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.trove.TObjectIntHashMap;
import org.elasticsearch.common.trove.TObjectIntIterator;
import org.elasticsearch.common.trove.iterator.TObjectIntIterator;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldData;
import org.elasticsearch.index.field.data.FieldDataType;

View File

@ -24,8 +24,8 @@ import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TObjectIntHashMap;
import org.elasticsearch.common.trove.TObjectIntIterator;
import org.elasticsearch.common.trove.iterator.TObjectIntIterator;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.facet.Facet;

View File

@ -23,8 +23,8 @@ import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.collect.BoundedTreeSet;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.ImmutableSet;
import org.elasticsearch.common.trove.TObjectIntHashMap;
import org.elasticsearch.common.trove.TObjectIntIterator;
import org.elasticsearch.common.trove.iterator.TObjectIntIterator;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.facet.AbstractFacetCollector;
import org.elasticsearch.search.facet.Facet;

View File

@ -25,8 +25,8 @@ import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.collect.ImmutableSet;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TObjectIntHashMap;
import org.elasticsearch.common.trove.TObjectIntIterator;
import org.elasticsearch.common.trove.iterator.TObjectIntIterator;
import org.elasticsearch.common.trove.map.hash.TObjectIntHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldData;
import org.elasticsearch.index.field.data.FieldDataType;

View File

@ -23,7 +23,7 @@ import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TIntObjectHashMap;
import org.elasticsearch.common.trove.map.hash.TIntObjectHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.SearchHit;

View File

@ -10,7 +10,7 @@ dependencies {
runtime 'com.google.inject.extensions:guice-multibindings:3.0-rc2'
runtime 'com.google.guava:guava:r07'
runtime 'org.elasticsearch:es-trove:2.1.0'
runtime 'org.elasticsearch:es-trove:3.0.0rc1'
runtime 'org.elasticsearch:es-jsr166y:20100615'
runtime 'commons-codec:commons-codec:1.4'