diff --git a/lucene/ivy-settings.xml b/lucene/ivy-settings.xml
index 80dd3050cb5..e615d918f8c 100644
--- a/lucene/ivy-settings.xml
+++ b/lucene/ivy-settings.xml
@@ -41,16 +41,12 @@
-
-
-
@@ -59,7 +55,6 @@
-
diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index baf0041d259..503df94610a 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -11,7 +11,7 @@ com.carrotsearch.randomizedtesting.version = 2.1.13
/com.carrotsearch.randomizedtesting/junit4-ant = ${com.carrotsearch.randomizedtesting.version}
/com.carrotsearch.randomizedtesting/randomizedtesting-runner = ${com.carrotsearch.randomizedtesting.version}
-/com.carrotsearch/hppc = 0.5.2
+/com.carrotsearch/hppc = 0.7.1
com.codahale.metrics.version = 3.0.1
/com.codahale.metrics/metrics-core = ${com.codahale.metrics.version}
@@ -211,10 +211,12 @@ org.bouncycastle.version = 1.45
/org.bouncycastle/bcmail-jdk15 = ${org.bouncycastle.version}
/org.bouncycastle/bcprov-jdk15 = ${org.bouncycastle.version}
-/org.carrot2.attributes/attributes-binder = 1.2.1
-/org.carrot2/carrot2-mini = 3.9.0
+/org.carrot2.attributes/attributes-binder = 1.2.3
+/org.carrot2.shaded/carrot2-guava = 18.0
+/org.carrot2/carrot2-mini = 3.10.3
+
-org.carrot2.morfologik.version = 1.7.1
+org.carrot2.morfologik.version = 1.10.0
/org.carrot2/morfologik-fsa = ${org.carrot2.morfologik.version}
/org.carrot2/morfologik-polish = ${org.carrot2.morfologik.version}
/org.carrot2/morfologik-stemming = ${org.carrot2.morfologik.version}
@@ -286,7 +288,7 @@ org.restlet.jee.version = 2.3.0
/org.restlet.jee/org.restlet = ${org.restlet.jee.version}
/org.restlet.jee/org.restlet.ext.servlet = ${org.restlet.jee.version}
-/org.simpleframework/simple-xml = 2.7
+/org.simpleframework/simple-xml = 2.7.1
org.slf4j.version = 1.7.7
/org.slf4j/jcl-over-slf4j = ${org.slf4j.version}
diff --git a/lucene/licenses/morfologik-fsa-1.10.0.jar.sha1 b/lucene/licenses/morfologik-fsa-1.10.0.jar.sha1
new file mode 100644
index 00000000000..0831b2c7659
--- /dev/null
+++ b/lucene/licenses/morfologik-fsa-1.10.0.jar.sha1
@@ -0,0 +1 @@
+87100c6baf60f096b42b9af06dafeb20f686cd02
diff --git a/lucene/licenses/morfologik-fsa-1.7.1.jar.sha1 b/lucene/licenses/morfologik-fsa-1.7.1.jar.sha1
deleted file mode 100644
index b71174e4d03..00000000000
--- a/lucene/licenses/morfologik-fsa-1.7.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fdf556c88d66f65440bd24024f55a52c227c0e3f
diff --git a/lucene/licenses/morfologik-polish-1.10.0.jar.sha1 b/lucene/licenses/morfologik-polish-1.10.0.jar.sha1
new file mode 100644
index 00000000000..7e6a54e69d0
--- /dev/null
+++ b/lucene/licenses/morfologik-polish-1.10.0.jar.sha1
@@ -0,0 +1 @@
+0f8eeb58acb5a39e162c0d49fcf29a70744cc2bc
diff --git a/lucene/licenses/morfologik-polish-1.7.1.jar.sha1 b/lucene/licenses/morfologik-polish-1.7.1.jar.sha1
deleted file mode 100644
index 3bd0d88ae67..00000000000
--- a/lucene/licenses/morfologik-polish-1.7.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e03b9feb39f6e2c0ac7c37e220d01cdae66d3a28
diff --git a/lucene/licenses/morfologik-stemming-1.10.0.jar.sha1 b/lucene/licenses/morfologik-stemming-1.10.0.jar.sha1
new file mode 100644
index 00000000000..e1459e0eadb
--- /dev/null
+++ b/lucene/licenses/morfologik-stemming-1.10.0.jar.sha1
@@ -0,0 +1 @@
+a74ad7ceb29ff1d8194eb161f5b2dfbd636626a5
diff --git a/lucene/licenses/morfologik-stemming-1.7.1.jar.sha1 b/lucene/licenses/morfologik-stemming-1.7.1.jar.sha1
deleted file mode 100644
index 3b53503b1c2..00000000000
--- a/lucene/licenses/morfologik-stemming-1.7.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c81d6c63e22e97819063cad7f1ecd20269cba720
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index ec3fd181e41..e03d811fec8 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -164,6 +164,10 @@ Optimizations
Other Changes
----------------------
+* SOLR-7790, SOLR-7792, SOLR-7791: Update Carrot2 clustering component to
+ version 3.10.3. Upgrade HPPC library to version 0.7.1, morfologik-stemming
+ to version 1.10.0. (Dawid Weiss)
+
* SOLR-7831: Start Scripts: Allow a configurable stack size [-Xss] (Steve Davids via Mark Miller)
* SOLR-7870: Write a test which asserts that requests to stateFormat=2 collection succeed on a node
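
The Java churn in the rest of this patch is largely mechanical: HPPC 0.7 drops the "Open" infix from its container names (IntIntOpenHashMap becomes IntIntHashMap, LongOpenHashSet becomes LongHashSet, and so on) and redefines the constructor argument as an expected element count rather than a raw capacity, which is why the defensive "size * 2" sizing disappears below. A minimal sketch of the renamed API, assuming only the hppc 0.7.1 jar (the map name and values are illustrative, not from this patch):

    import com.carrotsearch.hppc.IntIntHashMap;

    public class HppcNamingSketch {
      public static void main(String[] args) {
        // hppc 0.7.x: pass the number of elements you expect; the map manages
        // its own load-factor headroom, so callers no longer multiply by two.
        IntIntHashMap docPriorities = new IntIntHashMap(16);
        docPriorities.put(42, 7);
        System.out.println(docPriorities.get(42)); // 7; absent keys yield 0
      }
    }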
diff --git a/solr/contrib/clustering/ivy.xml b/solr/contrib/clustering/ivy.xml
index d3b153cb372..b7ae2f1acd0 100644
--- a/solr/contrib/clustering/ivy.xml
+++ b/solr/contrib/clustering/ivy.xml
@@ -24,25 +24,22 @@
-
-
+
+
-
+
-
-
-
-
-
+
+
+
+
+
diff --git a/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java b/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java
index 2a366f18fc1..a3aa76ed943 100644
--- a/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java
+++ b/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngineTest.java
@@ -376,7 +376,7 @@ public class CarrotClusteringEngineTest extends AbstractClusteringTestCase {
params.add(CarrotParams.SNIPPET_FIELD_NAME, "snippet");
final List labels = getLabels(checkEngine(
- getClusteringEngine("custom-duplicating-tokenizer"), 1, 16, new TermQuery(new Term("title",
+ getClusteringEngine("custom-duplicating-tokenizer"), 1, 15, new TermQuery(new Term("title",
"field")), params).get(0));
// The custom test tokenizer duplicates each token's text
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
index 34ac972352b..6243df5de04 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
@@ -27,11 +27,11 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
+import com.carrotsearch.hppc.IntObjectHashMap;
+import com.carrotsearch.hppc.LongHashSet;
+import com.carrotsearch.hppc.LongObjectHashMap;
import com.carrotsearch.hppc.LongObjectMap;
-import com.carrotsearch.hppc.LongObjectOpenHashMap;
-import com.carrotsearch.hppc.LongOpenHashSet;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.LongCursor;
import com.carrotsearch.hppc.cursors.LongObjectCursor;
@@ -247,9 +247,9 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
}
FixedBitSet groupBits = null;
- LongOpenHashSet groupSet = null;
+ LongHashSet groupSet = null;
DocList docList = rb.getResults().docList;
- IntOpenHashSet collapsedSet = new IntOpenHashSet(docList.size() * 2);
+ IntHashSet collapsedSet = new IntHashSet(docList.size() * 2);
//Gather the groups for the current page of documents
DocIterator idit = docList.iterator();
@@ -269,7 +269,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
int currentContext = 0;
int currentDocBase = contexts.get(currentContext).docBase;
int nextDocBase = (currentContext+1)<contexts.size() ? contexts.get(currentContext+1).docBase : Integer.MAX_VALUE;
-IntObjectOpenHashMap<BytesRef> ordBytes = null;
+IntObjectHashMap<BytesRef> ordBytes = null;
if(values != null) {
groupBits = new FixedBitSet(values.getValueCount());
MultiDocValues.OrdinalMap ordinalMap = null;
@@ -284,7 +284,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
}
int count = 0;
- ordBytes = new IntObjectOpenHashMap();
+ ordBytes = new IntObjectHashMap<>();
for(int i=0; i<globalDocs.length; i++) {
@@ ... @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
-LongObjectMap groups = ((GroupCollector) groupExpandCollector).getGroups();
+LongObjectMap<Collector> groups = ((GroupCollector) groupExpandCollector).getGroups();
NamedList outMap = new SimpleOrderedMap();
CharsRefBuilder charsRef = new CharsRefBuilder();
- for (LongObjectCursor cursor : (Iterable<LongObjectCursor>) groups) {
+ for (LongObjectCursor<Collector> cursor : groups) {
long groupValue = cursor.key;
- TopDocsCollector topDocsCollector = (TopDocsCollector) cursor.value;
+ TopDocsCollector<?> topDocsCollector = TopDocsCollector.class.cast(cursor.value);
TopDocs topDocs = topDocsCollector.topDocs();
ScoreDoc[] scoreDocs = topDocs.scoreDocs;
if (scoreDocs.length > 0) {
@@ -502,11 +502,11 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
private LongObjectMap<Collector> groups;
private FixedBitSet groupBits;
- private IntOpenHashSet collapsedSet;
+ private IntHashSet collapsedSet;
- public GroupExpandCollector(SortedDocValues docValues, FixedBitSet groupBits, IntOpenHashSet collapsedSet, int limit, Sort sort) throws IOException {
+ public GroupExpandCollector(SortedDocValues docValues, FixedBitSet groupBits, IntHashSet collapsedSet, int limit, Sort sort) throws IOException {
int numGroups = collapsedSet.size();
- groups = new LongObjectOpenHashMap<>(numGroups * 2);
+ groups = new LongObjectHashMap<>(numGroups);
DocIdSetIterator iterator = new BitSetIterator(groupBits, 0); // cost is not useful here
int group;
while ((group = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
@@ -536,7 +536,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
this.segmentOrdinalMap = ordinalMap.getGlobalOrds(context.ord);
}
- final LongObjectMap<LeafCollector> leafCollectors = new LongObjectOpenHashMap<>();
+ final LongObjectMap<LeafCollector> leafCollectors = new LongObjectHashMap<>();
for (LongObjectCursor<Collector> entry : groups) {
leafCollectors.put(entry.key, entry.value.getLeafCollector(context));
}
@@ -578,17 +578,16 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
private class NumericGroupExpandCollector implements Collector, GroupCollector {
private NumericDocValues docValues;
-
private String field;
- private LongObjectOpenHashMap<Collector> groups;
+ private LongObjectHashMap<Collector> groups;
- private IntOpenHashSet collapsedSet;
+ private IntHashSet collapsedSet;
private long nullValue;
- public NumericGroupExpandCollector(String field, long nullValue, LongOpenHashSet groupSet, IntOpenHashSet collapsedSet, int limit, Sort sort) throws IOException {
+ public NumericGroupExpandCollector(String field, long nullValue, LongHashSet groupSet, IntHashSet collapsedSet, int limit, Sort sort) throws IOException {
int numGroups = collapsedSet.size();
this.nullValue = nullValue;
- groups = new LongObjectOpenHashMap(numGroups * 2);
+ groups = new LongObjectHashMap<>(numGroups);
Iterator<LongCursor> iterator = groupSet.iterator();
while (iterator.hasNext()) {
LongCursor cursor = iterator.next();
@@ -609,7 +608,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
final int docBase = context.docBase;
this.docValues = context.reader().getNumericDocValues(this.field);
- final LongObjectOpenHashMap<LeafCollector> leafCollectors = new LongObjectOpenHashMap<>();
+ final LongObjectHashMap<LeafCollector> leafCollectors = new LongObjectHashMap<>();
for (LongObjectCursor<Collector> entry : groups) {
leafCollectors.put(entry.key, entry.value.getLeafCollector(context));
@@ -627,29 +626,30 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
@Override
public void collect(int docId) throws IOException {
long value = docValues.get(docId);
- if (value != nullValue && leafCollectors.containsKey(value) && !collapsedSet.contains(docId + docBase)) {
- LeafCollector c = leafCollectors.lget();
- c.collect(docId);
+ final int index;
+ if (value != nullValue &&
+ (index = leafCollectors.indexOf(value)) >= 0 &&
+ !collapsedSet.contains(docId + docBase)) {
+ leafCollectors.indexGet(index).collect(docId);
}
}
};
}
- public LongObjectOpenHashMap<Collector> getGroups() {
+ public LongObjectHashMap<Collector> getGroups() {
return groups;
}
}
private interface GroupCollector {
- public LongObjectMap getGroups();
-
+ public LongObjectMap<Collector> getGroups();
}
private Query getGroupQuery(String fname,
FieldType ft,
int size,
- LongOpenHashSet groupSet) {
+ LongHashSet groupSet) {
BytesRef[] bytesRefs = new BytesRef[size];
BytesRefBuilder term = new BytesRefBuilder();
@@ -676,8 +676,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
private Query getGroupQuery(String fname,
int size,
- IntObjectOpenHashMap<BytesRef> ordBytes) throws Exception {
-
+ IntObjectHashMap<BytesRef> ordBytes) throws Exception {
BytesRef[] bytesRefs = new BytesRef[size];
int index = -1;
Iterator<IntObjectCursor<BytesRef>>it = ordBytes.iterator();
@@ -728,7 +727,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
public FieldInfos getFieldInfos() {
Iterator<FieldInfo> it = in.getFieldInfos().iterator();
- List<FieldInfo> newInfos = new ArrayList<FieldInfo>();
+ List<FieldInfo> newInfos = new ArrayList<>();
while(it.hasNext()) {
FieldInfo fieldInfo = it.next();
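
HPPC 0.7 also removes the stateful containsKey()/lget() pair, so each lookup-then-read in ExpandComponent is rewritten around an explicit slot handle: indexOf() returns a non-negative slot when the key is present and indexGet() reads that slot without hashing the key again. A self-contained sketch of the pattern, with a hypothetical key and value (not code from this patch):

    import com.carrotsearch.hppc.LongObjectHashMap;

    public class SlotLookupSketch {
      public static void main(String[] args) {
        LongObjectHashMap<String> groups = new LongObjectHashMap<>();
        groups.put(3L, "group-3");

        // 0.5.x: if (groups.containsKey(k)) { v = groups.lget(); } // hidden state
        // 0.7.x: a single probe yields an explicit slot index.
        final int slot = groups.indexOf(3L);
        if (slot >= 0) {
          System.out.println(groups.indexGet(slot)); // group-3
        }
      }
    }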
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
index b94e4b36478..1ee1f26c359 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
@@ -17,8 +17,6 @@
package org.apache.solr.handler.component;
-import com.carrotsearch.hppc.IntIntOpenHashMap;
-
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@@ -73,6 +71,8 @@ import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
+import com.carrotsearch.hppc.IntIntHashMap;
+
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
@@ -536,16 +536,16 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
}
- public static IntIntOpenHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Map<BytesRef, Integer>boosted, Map context) throws IOException {
+ public static IntIntHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Map<BytesRef, Integer>boosted, Map context) throws IOException {
- IntIntOpenHashMap boostDocs = null;
+ IntIntHashMap boostDocs = null;
if(boosted != null) {
//First see if it's already in the request context. Could have been put there
//by another caller.
if(context != null) {
- boostDocs = (IntIntOpenHashMap)context.get(BOOSTED_DOCIDS);
+ boostDocs = (IntIntHashMap) context.get(BOOSTED_DOCIDS);
}
if(boostDocs != null) {
@@ -555,13 +555,13 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
SchemaField idField = indexSearcher.getSchema().getUniqueKeyField();
String fieldName = idField.getName();
- HashSet localBoosts = new HashSet(boosted.size()*2);
+ HashSet localBoosts = new HashSet<>(boosted.size()*2);
Iterator boostedIt = boosted.keySet().iterator();
while(boostedIt.hasNext()) {
localBoosts.add(boostedIt.next());
}
- boostDocs = new IntIntOpenHashMap(boosted.size()*2);
+ boostDocs = new IntIntHashMap(boosted.size());
List<LeafReaderContext>leaves = indexSearcher.getTopReaderContext().leaves();
PostingsEnum postingsEnum = null;
diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java b/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java
index 1a53c713240..a8859909ddc 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java
@@ -20,6 +20,7 @@ package org.apache.solr.handler.component;
import java.io.IOException;
import java.util.*;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
@@ -823,7 +824,6 @@ class StringStatsValues extends AbstractStatsValues {
@Override
public long hash(String v) {
- // NOTE: renamed hashUnencodedChars starting with guava 15
- return hasher.hashString(v).asLong();
+ return hasher.hashString(v, StandardCharsets.UTF_8).asLong();
}
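
Guava renamed hashString(CharSequence) to hashUnencodedChars() in version 15, so the stale NOTE is dropped and the call pins an explicit charset instead, which is what the new StandardCharsets import is for. A sketch of the charset-pinning form; murmur3_128 stands in here for whatever HashFunction StatsValuesFactory actually configures:

    import java.nio.charset.StandardCharsets;

    import com.google.common.hash.HashFunction;
    import com.google.common.hash.Hashing;

    public class HashStringSketch {
      public static void main(String[] args) {
        HashFunction hasher = Hashing.murmur3_128();
        // Hash the UTF-8 encoding of the string; this overload is stable
        // across guava versions, unlike the charset-less one.
        long h = hasher.hashString("solr", StandardCharsets.UTF_8).asLong();
        System.out.println(h);
      }
    }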
diff --git a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
index f59abc2903f..cb1097ee243 100644
--- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
@@ -27,8 +27,8 @@ import java.util.Map;
import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.IntArrayList;
-import com.carrotsearch.hppc.IntIntOpenHashMap;
-import com.carrotsearch.hppc.IntLongOpenHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
+import com.carrotsearch.hppc.IntLongHashMap;
import com.carrotsearch.hppc.cursors.IntIntCursor;
import com.carrotsearch.hppc.cursors.IntLongCursor;
import org.apache.lucene.index.DocValues;
@@ -228,8 +228,8 @@ public class CollapsingQParserPlugin extends QParserPlugin {
}
}
- private IntIntOpenHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Map boosted, Map context) throws IOException {
- IntIntOpenHashMap boostDocs = QueryElevationComponent.getBoostDocs(indexSearcher, boosted, context);
+ private IntIntHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Map boosted, Map context) throws IOException {
+ IntIntHashMap boostDocs = QueryElevationComponent.getBoostDocs(indexSearcher, boosted, context);
return boostDocs;
}
@@ -242,7 +242,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
//We have to deal with it here rather then the constructor because
//because the QueryElevationComponent runs after the Queries are constructed.
- IntIntOpenHashMap boostDocsMap = null;
+ IntIntHashMap boostDocsMap = null;
Map context = null;
SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
if(info != null) {
@@ -413,7 +413,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int segments,
SortedDocValues collapseValues,
int nullPolicy,
- IntIntOpenHashMap boostDocsMap) {
+ IntIntHashMap boostDocsMap) {
this.maxDoc = maxDoc;
this.contexts = new LeafReaderContext[segments];
this.collapsedSet = new FixedBitSet(maxDoc);
@@ -608,7 +608,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
private LeafReaderContext[] contexts;
private FixedBitSet collapsedSet;
private NumericDocValues collapseValues;
- private IntLongOpenHashMap cmap;
+ private IntLongHashMap cmap;
private int maxDoc;
private int nullPolicy;
private float nullScore = -Float.MAX_VALUE;
@@ -627,7 +627,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int nullPolicy,
int size,
String field,
- IntIntOpenHashMap boostDocsMap) {
+ IntIntHashMap boostDocsMap) {
this.maxDoc = maxDoc;
this.contexts = new LeafReaderContext[segments];
this.collapsedSet = new FixedBitSet(maxDoc);
@@ -636,7 +636,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
nullScores = new FloatArrayList();
}
- this.cmap = new IntLongOpenHashMap(size);
+ this.cmap = new IntLongHashMap(size);
this.field = field;
if(boostDocsMap != null) {
@@ -680,18 +680,19 @@ public class CollapsingQParserPlugin extends QParserPlugin {
if(collapseValue != nullValue) {
float score = scorer.score();
- if(cmap.containsKey(collapseValue)) {
- long scoreDoc = cmap.lget();
+ final int idx;
+ if((idx = cmap.indexOf(collapseValue)) >= 0) {
+ long scoreDoc = cmap.indexGet(idx);
int testScore = (int)(scoreDoc>>32);
int currentScore = Float.floatToRawIntBits(score);
if(currentScore > testScore) {
//Current score is higher so replace the old scoreDoc with the current scoreDoc
- cmap.lset((((long)currentScore)<<32)+globalDoc);
+ cmap.indexReplace(idx, (((long)currentScore)<<32)+globalDoc);
}
} else {
//Combine the score and document into a long.
long scoreDoc = (((long)Float.floatToRawIntBits(score))<<32)+globalDoc;
- cmap.put(collapseValue, scoreDoc);
+ cmap.indexInsert(idx, collapseValue, scoreDoc);
}
} else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
float score = scorer.score();
@@ -807,7 +808,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
boolean max,
boolean needsScores,
FieldType fieldType,
- IntIntOpenHashMap boostDocs,
+ IntIntHashMap boostDocs,
FunctionQuery funcQuery, IndexSearcher searcher) throws IOException{
this.maxDoc = maxDoc;
@@ -975,7 +976,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
boolean max,
boolean needsScores,
FieldType fieldType,
- IntIntOpenHashMap boostDocsMap,
+ IntIntHashMap boostDocsMap,
FunctionQuery funcQuery,
IndexSearcher searcher) throws IOException{
@@ -1035,7 +1036,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
DocIdSetIterator it = new BitSetIterator(collapseStrategy.getCollapsedSet(), 0); // cost is not useful here
int globalDoc = -1;
int nullScoreIndex = 0;
- IntIntOpenHashMap cmap = collapseStrategy.getCollapseMap();
+ IntIntHashMap cmap = collapseStrategy.getCollapseMap();
int[] docs = collapseStrategy.getDocs();
float[] scores = collapseStrategy.getScores();
FloatArrayList nullScores = collapseStrategy.getNullScores();
@@ -1090,7 +1091,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
String hint,
boolean needsScores,
int size,
- IntIntOpenHashMap boostDocs,
+ IntIntHashMap boostDocs,
SolrIndexSearcher searcher) throws IOException {
@@ -1285,7 +1286,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int nullPolicy,
boolean max,
boolean needsScores,
- IntIntOpenHashMap boostDocsMap,
+ IntIntHashMap boostDocsMap,
SortedDocValues values) {
this.field = field;
this.nullPolicy = nullPolicy;
@@ -1376,7 +1377,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int[] ords,
boolean max,
boolean needsScores,
- IntIntOpenHashMap boostDocs,
+ IntIntHashMap boostDocs,
SortedDocValues values) throws IOException {
super(maxDoc, field, nullPolicy, max, needsScores, boostDocs, values);
this.ords = ords;
@@ -1457,7 +1458,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int[] ords,
boolean max,
boolean needsScores,
- IntIntOpenHashMap boostDocs,
+ IntIntHashMap boostDocs,
SortedDocValues values) throws IOException {
super(maxDoc, field, nullPolicy, max, needsScores, boostDocs, values);
this.ords = ords;
@@ -1539,7 +1540,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int[] ords,
boolean max,
boolean needsScores,
- IntIntOpenHashMap boostDocs, SortedDocValues values) throws IOException {
+ IntIntHashMap boostDocs, SortedDocValues values) throws IOException {
super(maxDoc, field, nullPolicy, max, needsScores, boostDocs, values);
this.ords = ords;
this.ordVals = new long[ords.length];
@@ -1623,7 +1624,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int[] ords,
boolean max,
boolean needsScores,
- IntIntOpenHashMap boostDocs,
+ IntIntHashMap boostDocs,
FunctionQuery funcQuery,
IndexSearcher searcher,
SortedDocValues values) throws IOException {
@@ -1707,7 +1708,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
private abstract class IntFieldValueStrategy {
protected int nullPolicy;
- protected IntIntOpenHashMap cmap;
+ protected IntIntHashMap cmap;
protected Scorer scorer;
protected FloatArrayList nullScores;
protected float nullScore;
@@ -1736,7 +1737,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int nullPolicy,
boolean max,
boolean needsScores,
- IntIntOpenHashMap boostDocsMap) {
+ IntIntHashMap boostDocsMap) {
this.field = field;
this.collapseField = collapseField;
this.nullValue = nullValue;
@@ -1744,7 +1745,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
this.max = max;
this.needsScores = needsScores;
this.collapsedSet = new FixedBitSet(maxDoc);
- this.cmap = new IntIntOpenHashMap(size);
+ this.cmap = new IntIntHashMap(size);
if(boostDocsMap != null) {
this.boosts = true;
this.boostDocs = new IntArrayList();
@@ -1801,7 +1802,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
return nullScores;
}
- public IntIntOpenHashMap getCollapseMap() {
+ public IntIntHashMap getCollapseMap() {
return cmap;
}
@@ -1842,7 +1843,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int nullPolicy,
boolean max,
boolean needsScores,
- IntIntOpenHashMap boostDocs) throws IOException {
+ IntIntHashMap boostDocs) throws IOException {
super(maxDoc, size, collapseField, field, nullValue, nullPolicy, max, needsScores, boostDocs);
@@ -1881,8 +1882,9 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int currentVal = (int) minMaxVals.get(contextDoc);
if(collapseKey != nullValue) {
- if(cmap.containsKey(collapseKey)) {
- int pointer = cmap.lget();
+ final int idx;
+ if((idx = cmap.indexOf(collapseKey)) >= 0) {
+ int pointer = cmap.indexGet(idx);
if(comp.test(currentVal, testValues[pointer])) {
testValues[pointer]= currentVal;
docs[pointer] = globalDoc;
@@ -1942,7 +1944,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int nullPolicy,
boolean max,
boolean needsScores,
- IntIntOpenHashMap boostDocs) throws IOException {
+ IntIntHashMap boostDocs) throws IOException {
super(maxDoc, size, collapseField, field, nullValue, nullPolicy, max, needsScores, boostDocs);
@@ -1982,8 +1984,9 @@ public class CollapsingQParserPlugin extends QParserPlugin {
float currentVal = Float.intBitsToFloat(minMaxVal);
if(collapseKey != nullValue) {
- if(cmap.containsKey(collapseKey)) {
- int pointer = cmap.lget();
+ final int idx;
+ if((idx = cmap.indexOf(collapseKey)) >= 0) {
+ int pointer = cmap.indexGet(idx);
if(comp.test(currentVal, testValues[pointer])) {
testValues[pointer] = currentVal;
docs[pointer] = globalDoc;
@@ -2054,7 +2057,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
int nullPolicy,
boolean max,
boolean needsScores,
- IntIntOpenHashMap boostDocs,
+ IntIntHashMap boostDocs,
FunctionQuery funcQuery,
IndexSearcher searcher) throws IOException {
@@ -2108,8 +2111,9 @@ public class CollapsingQParserPlugin extends QParserPlugin {
float currentVal = functionValues.floatVal(contextDoc);
if(collapseKey != nullValue) {
- if(cmap.containsKey(collapseKey)) {
- int pointer = cmap.lget();
+ final int idx;
+ if((idx = cmap.indexOf(collapseKey)) >= 0) {
+ int pointer = cmap.indexGet(idx);
if(comp.test(currentVal, testValues[pointer])) {
testValues[pointer] = currentVal;
docs[pointer] = globalDoc;
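
On a miss, HPPC 0.7's indexOf() returns the one's complement of the free slot (a negative value), and indexInsert()/indexReplace() accept that handle directly, so CollapsingQParserPlugin now probes each collapse key exactly once whether it hits or misses. The collector keeps packing a float score and an int docid into a single long, comparing scores through their raw int bits (order-preserving for the non-negative scores involved). A simplified sketch of both ideas with made-up values, not the collector itself:

    import com.carrotsearch.hppc.IntLongHashMap;

    public class PackedScoreDocSketch {
      // High 32 bits: raw float bits of the score; low 32 bits: global docid.
      static long pack(float score, int doc) {
        return (((long) Float.floatToRawIntBits(score)) << 32) + doc;
      }

      public static void main(String[] args) {
        IntLongHashMap cmap = new IntLongHashMap(128);
        final int collapseValue = 9;

        final int idx = cmap.indexOf(collapseValue);
        if (idx >= 0) {
          // Hit: replace in place when the new score wins.
          if (Float.floatToRawIntBits(2.0f) > (int) (cmap.indexGet(idx) >> 32)) {
            cmap.indexReplace(idx, pack(2.0f, 7));
          }
        } else {
          // Miss: idx is ~slot; insert reuses it instead of re-hashing the key.
          cmap.indexInsert(idx, collapseValue, pack(1.0f, 5));
        }
        System.out.println(cmap.get(collapseValue) == pack(1.0f, 5)); // true
      }
    }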
diff --git a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
index f5fb64d3967..8beebe1c46a 100644
--- a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
@@ -23,8 +23,9 @@ import java.util.Comparator;
import java.util.Map;
import java.util.Set;
-import com.carrotsearch.hppc.IntFloatOpenHashMap;
-import com.carrotsearch.hppc.IntIntOpenHashMap;
+import com.carrotsearch.hppc.IntFloatHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
+
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
@@ -42,7 +43,6 @@ import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.Weight;
-import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
@@ -292,7 +292,7 @@ public class ReRankQParserPlugin extends QParserPlugin {
requestContext = info.getReq().getContext();
}
- IntIntOpenHashMap boostedDocs = QueryElevationComponent.getBoostDocs((SolrIndexSearcher)searcher, boostedPriority, requestContext);
+ IntIntHashMap boostedDocs = QueryElevationComponent.getBoostDocs((SolrIndexSearcher)searcher, boostedPriority, requestContext);
ScoreDoc[] mainScoreDocs = mainDocs.scoreDocs;
ScoreDoc[] reRankScoreDocs = new ScoreDoc[Math.min(mainScoreDocs.length, reRankDocs)];
@@ -391,14 +391,15 @@ public class ReRankQParserPlugin extends QParserPlugin {
}
public class BoostedComp implements Comparator {
- IntFloatOpenHashMap boostedMap;
+ IntFloatHashMap boostedMap;
- public BoostedComp(IntIntOpenHashMap boostedDocs, ScoreDoc[] scoreDocs, float maxScore) {
- this.boostedMap = new IntFloatOpenHashMap(boostedDocs.size()*2);
+ public BoostedComp(IntIntHashMap boostedDocs, ScoreDoc[] scoreDocs, float maxScore) {
+ this.boostedMap = new IntFloatHashMap(boostedDocs.size()*2);
for(int i=0; i<scoreDocs.length; i++) {
- if(boostedDocs.containsKey(scoreDocs[i].doc)) {
- boostedMap.put(scoreDocs[i].doc, maxScore+boostedDocs.lget());
+ final int idx;
+ if((idx = boostedDocs.indexOf(scoreDocs[i].doc)) >= 0) {
+ boostedMap.put(scoreDocs[i].doc, maxScore+boostedDocs.indexGet(idx));
} else {
break;
}
@@ -410,21 +411,16 @@ public class ReRankQParserPlugin extends QParserPlugin {
ScoreDoc doc2 = (ScoreDoc) o2;
float score1 = doc1.score;
float score2 = doc2.score;
- if(boostedMap.containsKey(doc1.doc)) {
- score1 = boostedMap.lget();
+ int idx;
+ if((idx = boostedMap.indexOf(doc1.doc)) >= 0) {
+ score1 = boostedMap.indexGet(idx);
}
- if(boostedMap.containsKey(doc2.doc)) {
- score2 = boostedMap.lget();
+ if((idx = boostedMap.indexOf(doc2.doc)) >= 0) {
+ score2 = boostedMap.indexGet(idx);
}
- if(score1 > score2) {
- return -1;
- } else if(score1 < score2) {
- return 1;
- } else {
- return 0;
- }
+ return -Float.compare(score1, score2);
}
}
}
\ No newline at end of file
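
The hand-rolled three-way comparison in BoostedComp.compare() collapses to a negated Float.compare(), which sorts descending and, unlike the old branches (which returned 0 for any comparison involving NaN), stays a consistent total order. A tiny sketch of the ordering:

    import java.util.Arrays;

    public class DescendingScoreSketch {
      public static void main(String[] args) {
        Float[] scores = { 1.5f, Float.NaN, 0.5f, 2.0f };
        // Same idea as the patched comparator: highest score first.
        Arrays.sort(scores, (a, b) -> -Float.compare(a, b));
        System.out.println(Arrays.toString(scores)); // [NaN, 2.0, 1.5, 0.5]
      }
    }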
diff --git a/solr/core/src/java/org/apache/solr/util/hll/HLL.java b/solr/core/src/java/org/apache/solr/util/hll/HLL.java
index 432401bfc0f..39ba80b37e0 100644
--- a/solr/core/src/java/org/apache/solr/util/hll/HLL.java
+++ b/solr/core/src/java/org/apache/solr/util/hll/HLL.java
@@ -19,8 +19,8 @@ package org.apache.solr.util.hll;
import java.util.Arrays;
-import com.carrotsearch.hppc.IntByteOpenHashMap;
-import com.carrotsearch.hppc.LongOpenHashSet;
+import com.carrotsearch.hppc.IntByteHashMap;
+import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.cursors.IntByteCursor;
import com.carrotsearch.hppc.cursors.LongCursor;
@@ -69,9 +69,9 @@ public class HLL implements Cloneable {
// ************************************************************************
// Storage
// storage used when #type is EXPLICIT, null otherwise
- LongOpenHashSet explicitStorage;
+ LongHashSet explicitStorage;
// storage used when #type is SPARSE, null otherwise
- IntByteOpenHashMap sparseProbabilisticStorage;
+ IntByteHashMap sparseProbabilisticStorage;
// storage used when #type is FULL, null otherwise
BitVector probabilisticStorage;
@@ -398,8 +398,9 @@ public class HLL implements Cloneable {
final int j = (int)(rawValue & mBitsMask);
final byte currentValue;
- if (sparseProbabilisticStorage.containsKey(j)) {
- currentValue = sparseProbabilisticStorage.lget();
+ final int index = sparseProbabilisticStorage.indexOf(j);
+ if (index >= 0) {
+ currentValue = sparseProbabilisticStorage.indexGet(index);
} else {
currentValue = 0;
}
@@ -467,10 +468,10 @@ public class HLL implements Cloneable {
// nothing to be done
break;
case EXPLICIT:
- this.explicitStorage = new LongOpenHashSet();
+ this.explicitStorage = new LongHashSet();
break;
case SPARSE:
- this.sparseProbabilisticStorage = new IntByteOpenHashMap();
+ this.sparseProbabilisticStorage = new IntByteHashMap();
break;
case FULL:
this.probabilisticStorage = new BitVector(regwidth, m);
@@ -522,7 +523,7 @@ public class HLL implements Cloneable {
for(int j=0; j
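
The same indexOf()/indexGet() rewrite reaches the sparse HLL storage above: a register that was never set has no entry in the IntByteHashMap, and a negative index is read as zero. A minimal sketch of that read path, using an assumed register number:

    import com.carrotsearch.hppc.IntByteHashMap;

    public class SparseRegisterSketch {
      public static void main(String[] args) {
        IntByteHashMap sparse = new IntByteHashMap();
        sparse.put(3, (byte) 5);

        final int j = 7; // a register that was never written
        final int index = sparse.indexOf(j);
        final byte currentValue = (index >= 0) ? sparse.indexGet(index) : 0;
        System.out.println(currentValue); // 0
      }
    }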