Merge pull request #11144 from jpountz/fix/remove_hppc_esoteric_dep

Internal: remove dependency on hppc:esoteric.
Adrien Grand 2015-05-15 16:25:51 +02:00
commit 3e215e720a
4 changed files with 11 additions and 20 deletions
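
Background, to make the diffs below self-explanatory: hppc 0.7 ships its containers for less common key types in a secondary artifact published under the esoteric Maven classifier; the byte- and double-keyed classes dropped here (ByteObjectHashMap, DoubleHashSet, DoubleCursor) come from that jar. The commit removes that extra dependency and rewrites the few call sites against the int- and long-keyed containers in the main hppc artifact. Hedged sketches of the two substitution patterns follow the PercolatorService and LongFieldDataTests hunks below.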

pom.xml

@@ -229,13 +229,6 @@
             <version>0.7.1</version>
         </dependency>
-        <dependency> <!-- ES uses byte* hashes -->
-            <groupId>com.carrotsearch</groupId>
-            <artifactId>hppc</artifactId>
-            <version>0.7.1</version>
-            <classifier>esoteric</classifier>
-        </dependency>
         <dependency>
             <groupId>joda-time</groupId>
             <artifactId>joda-time</artifactId>


@@ -19,9 +19,7 @@
 package org.elasticsearch.index.mapper.core;

-import com.carrotsearch.hppc.DoubleHashSet;
 import com.carrotsearch.hppc.LongArrayList;
 import com.carrotsearch.hppc.LongHashSet;

 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.NumericTokenStream;

PercolatorService.java

@@ -18,7 +18,7 @@
  */

 package org.elasticsearch.percolator;

-import com.carrotsearch.hppc.ByteObjectHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
 import com.google.common.collect.Lists;
 import org.apache.lucene.index.LeafReaderContext;
@@ -109,7 +109,7 @@ public class PercolatorService extends AbstractComponent {
     public final static String TYPE_NAME = ".percolator";

     private final IndicesService indicesService;
-    private final ByteObjectHashMap<PercolatorType> percolatorTypes;
+    private final IntObjectHashMap<PercolatorType> percolatorTypes;
     private final PageCacheRecycler pageCacheRecycler;
     private final BigArrays bigArrays;
     private final ClusterService clusterService;
@@ -153,7 +153,7 @@ public class PercolatorService extends AbstractComponent {
         single = new SingleDocumentPercolatorIndex(cache);
         multi = new MultiDocumentPercolatorIndex(cache);

-        percolatorTypes = new ByteObjectHashMap<>(6);
+        percolatorTypes = new IntObjectHashMap<>(6);
         percolatorTypes.put(countPercolator.id(), countPercolator);
         percolatorTypes.put(queryCountPercolator.id(), queryCountPercolator);
         percolatorTypes.put(matchPercolator.id(), matchPercolator);
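
The three PercolatorService hunks above are one mechanical substitution: ByteObjectHashMap lives in the esoteric jar, IntObjectHashMap in the main hppc artifact, and a byte key widens to int implicitly, so no call site changes. Below is a minimal standalone sketch of that pattern; the ByteKeyedRegistry class and String values are made up for illustration (the real map stores PercolatorType instances keyed by their byte id()):

import com.carrotsearch.hppc.IntObjectHashMap;

// Hypothetical stand-in for PercolatorService's type registry: a byte id
// widens to int at the call site, so the int-keyed map from the core hppc
// jar is a drop-in replacement for the esoteric ByteObjectHashMap.
public class ByteKeyedRegistry {

    private final IntObjectHashMap<String> types = new IntObjectHashMap<>(6);

    public void register(byte id, String type) {
        types.put(id, type); // implicit byte -> int widening, no cast needed
    }

    public String lookup(byte id) {
        return types.get(id); // returns null when the id is absent
    }

    public static void main(String[] args) {
        ByteKeyedRegistry registry = new ByteKeyedRegistry();
        registry.register((byte) 1, "count");
        System.out.println(registry.lookup((byte) 1)); // prints "count"
    }
}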

LongFieldDataTests.java

@@ -19,9 +19,7 @@
 package org.elasticsearch.index.fielddata;

-import com.carrotsearch.hppc.DoubleHashSet;
 import com.carrotsearch.hppc.LongHashSet;
-import com.carrotsearch.hppc.cursors.DoubleCursor;
 import com.carrotsearch.hppc.cursors.LongCursor;
 import org.apache.lucene.document.Document;
@@ -37,7 +35,9 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;

-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.lessThan;

 /**
  * Tests for all integer types (byte, short, int, long).
@@ -348,7 +348,7 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
         final SortedNumericDocValues data = atomicFieldData.getLongValues();
         final SortedNumericDoubleValues doubleData = atomicFieldData.getDoubleValues();

         final LongHashSet set = new LongHashSet();
-        final DoubleHashSet doubleSet = new DoubleHashSet();
+        final LongHashSet doubleSet = new LongHashSet();

         for (int i = 0; i < values.size(); ++i) {
             final LongHashSet v = values.get(i);
@@ -365,17 +365,17 @@ public class LongFieldDataTests extends AbstractNumericFieldDataTests {
             }
             assertThat(set, equalTo(v));

-            final DoubleHashSet doubleV = new DoubleHashSet();
+            final LongHashSet doubleV = new LongHashSet();
             for (LongCursor c : v) {
-                doubleV.add(c.value);
+                doubleV.add(Double.doubleToLongBits(c.value));
             }

             doubleSet.clear();
             doubleData.setDocument(i);
             numValues = doubleData.count();
             double prev = 0;
             for (int j = 0; j < numValues; j++) {
-                double current;
-                doubleSet.add(current = doubleData.valueAt(j));
+                double current = doubleData.valueAt(j);
+                doubleSet.add(Double.doubleToLongBits(current));
                 if (j > 0) {
                     assertThat(prev, lessThan(current));
                 }
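
The test rewrite above replaces the esoteric DoubleHashSet with a LongHashSet keyed by Double.doubleToLongBits. Here is a standalone sketch of that encoding and its equality semantics, independent of the Elasticsearch test harness (the class name is made up for illustration):

import com.carrotsearch.hppc.LongHashSet;

// Sketch of the bit-pattern trick used in LongFieldDataTests above:
// store each double as its IEEE 754 bit layout in a long-keyed set.
public class DoubleBitsSetSketch {

    public static void main(String[] args) {
        LongHashSet set = new LongHashSet();
        set.add(Double.doubleToLongBits(1.5));
        set.add(Double.doubleToLongBits(Double.NaN));

        // Membership works as long as both sides use the same mapping.
        System.out.println(set.contains(Double.doubleToLongBits(1.5)));        // true

        // doubleToLongBits collapses every NaN to one canonical bit pattern,
        // so NaN membership is well defined (whereas NaN == NaN is false).
        System.out.println(set.contains(Double.doubleToLongBits(Double.NaN))); // true

        // Caveat: 0.0 and -0.0 have distinct bit patterns, so they are two
        // different members under this encoding, even though 0.0 == -0.0.
        System.out.println(set.contains(Double.doubleToLongBits(-0.0)));       // false
    }
}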