mirror of https://github.com/apache/lucene.git
SOLR-7790, SOLR-7792, SOLR-7791: Update Carrot2 clustering component to version 3.10.3. Upgrade HPPC library to version 0.7.1, morfologik-stemming to version 1.10.0. (Dawid Weiss)
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1697345 13f79535-47bb-0310-9956-ffa450edef68
parent c6d96448ec
commit c1dc2fb6a6
@@ -41,16 +41,12 @@
     <!-- you might need to tweak this from china so it works -->
     <ibiblio name="working-chinese-mirror" root="http://uk.maven.org/maven2" m2compatible="true" />
 
-    <ibiblio name="rr-snapshot" root="https://oss.sonatype.org/content/repositories/comcarrotsearch-1039" m2compatible="true" />
-
-    <!--
     <filesystem name="local-maven-2" m2compatible="true" local="true">
       <artifact
           pattern="${local-maven2-dir}/[organisation]/[module]/[revision]/[module]-[revision].[ext]" />
       <ivy
           pattern="${local-maven2-dir}/[organisation]/[module]/[revision]/[module]-[revision].pom" />
     </filesystem>
-    -->
 
     <chain name="default" returnFirst="true" checkmodified="true" changingPattern=".*SNAPSHOT">
       <resolver ref="local"/>
@@ -59,7 +55,6 @@
       <resolver ref="maven.restlet.org" />
       <resolver ref="sonatype-releases" />
       <resolver ref="releases.cloudera.com"/>
-      <resolver ref="rr-snapshot"/>
       <!-- <resolver ref="svnkit-releases" /> -->
       <resolver ref="working-chinese-mirror" />
     </chain>
@@ -11,7 +11,7 @@ com.carrotsearch.randomizedtesting.version = 2.1.13
 /com.carrotsearch.randomizedtesting/junit4-ant = ${com.carrotsearch.randomizedtesting.version}
 /com.carrotsearch.randomizedtesting/randomizedtesting-runner = ${com.carrotsearch.randomizedtesting.version}
 
-/com.carrotsearch/hppc = 0.5.2
+/com.carrotsearch/hppc = 0.7.1
 
 com.codahale.metrics.version = 3.0.1
 /com.codahale.metrics/metrics-core = ${com.codahale.metrics.version}
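HPPC 0.7 renamed every container type, dropping the "Open" infix (IntIntOpenHashMap becomes IntIntHashMap, LongOpenHashSet becomes LongHashSet, and so on), which is why the version bump above ripples through so many imports in the Java hunks below. A minimal sketch of the rename, assuming HPPC 0.7.1 on the classpath; variable names are illustrative only, not from this commit:

import com.carrotsearch.hppc.IntIntHashMap; // 0.5.x: com.carrotsearch.hppc.IntIntOpenHashMap
import com.carrotsearch.hppc.LongHashSet;   // 0.5.x: com.carrotsearch.hppc.LongOpenHashSet

public class HppcRenameSketch {
  public static void main(String[] args) {
    IntIntHashMap map = new IntIntHashMap(); // same hashing containers, new names
    LongHashSet set = new LongHashSet();
    map.put(1, 2);
    set.add(3L);
    System.out.println(map.get(1) + " " + set.contains(3L)); // 2 true
  }
}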
@@ -211,10 +211,12 @@ org.bouncycastle.version = 1.45
 /org.bouncycastle/bcmail-jdk15 = ${org.bouncycastle.version}
 /org.bouncycastle/bcprov-jdk15 = ${org.bouncycastle.version}
 
-/org.carrot2.attributes/attributes-binder = 1.2.1
-/org.carrot2/carrot2-mini = 3.9.0
+/org.carrot2.attributes/attributes-binder = 1.2.3
+/org.carrot2.shaded/carrot2-guava = 18.0
+
+/org.carrot2/carrot2-mini = 3.10.3
 
-org.carrot2.morfologik.version = 1.7.1
+org.carrot2.morfologik.version = 1.10.0
 /org.carrot2/morfologik-fsa = ${org.carrot2.morfologik.version}
 /org.carrot2/morfologik-polish = ${org.carrot2.morfologik.version}
 /org.carrot2/morfologik-stemming = ${org.carrot2.morfologik.version}
@@ -286,7 +288,7 @@ org.restlet.jee.version = 2.3.0
 /org.restlet.jee/org.restlet = ${org.restlet.jee.version}
 /org.restlet.jee/org.restlet.ext.servlet = ${org.restlet.jee.version}
 
-/org.simpleframework/simple-xml = 2.7
+/org.simpleframework/simple-xml = 2.7.1
 
 org.slf4j.version = 1.7.7
 /org.slf4j/jcl-over-slf4j = ${org.slf4j.version}
@@ -0,0 +1 @@
+87100c6baf60f096b42b9af06dafeb20f686cd02
@@ -1 +0,0 @@
-fdf556c88d66f65440bd24024f55a52c227c0e3f
@@ -0,0 +1 @@
+0f8eeb58acb5a39e162c0d49fcf29a70744cc2bc
@@ -1 +0,0 @@
-e03b9feb39f6e2c0ac7c37e220d01cdae66d3a28
@@ -0,0 +1 @@
+a74ad7ceb29ff1d8194eb161f5b2dfbd636626a5
@@ -1 +0,0 @@
-c81d6c63e22e97819063cad7f1ecd20269cba720
@@ -164,6 +164,10 @@ Optimizations
 Other Changes
 ----------------------
 
+* SOLR-7790, SOLR-7792, SOLR-7791: Update Carrot2 clustering component to
+  version 3.10.3. Upgrade HPPC library to version 0.7.1, morfologik-stemming
+  to version 1.10.0. (Dawid Weiss)
+
 * SOLR-7831: Start Scripts: Allow a configurable stack size [-Xss] (Steve Davids via Mark Miller)
 
 * SOLR-7870: Write a test which asserts that requests to stateFormat=2 collection succeed on a node
@@ -24,26 +24,23 @@
   </configurations>
   <dependencies>
     <dependency org="org.carrot2" name="carrot2-mini" rev="${/org.carrot2/carrot2-mini}" conf="compile"/>
-    <dependency org="com.carrotsearch" name="hppc" rev="${/com.carrotsearch/hppc}" conf="compile"/>
+    <dependency org="org.carrot2.shaded" name="carrot2-guava" rev="${/org.carrot2.shaded/carrot2-guava}" conf="compile"/>
     <dependency org="org.carrot2.attributes" name="attributes-binder" rev="${/org.carrot2.attributes/attributes-binder}" conf="compile"/>
 
     <dependency org="org.simpleframework" name="simple-xml" rev="${/org.simpleframework/simple-xml}" conf="compile"/>
 
     <dependency org="org.apache.mahout" name="mahout-math" rev="${/org.apache.mahout/mahout-math}" conf="compile"/>
     <dependency org="org.apache.mahout" name="mahout-collections" rev="${/org.apache.mahout/mahout-collections}" conf="compile"/>
 
-    <dependency org="org.codehaus.jackson" name="jackson-core-asl" rev="${/org.codehaus.jackson/jackson-core-asl}" conf="compile"/>
-    <dependency org="org.codehaus.jackson" name="jackson-mapper-asl" rev="${/org.codehaus.jackson/jackson-mapper-asl}" conf="compile"/>
+    <dependency org="com.fasterxml.jackson.core" name="jackson-annotations" rev="${/com.fasterxml.jackson.core/jackson-annotations}" conf="compile"/>
+    <dependency org="com.fasterxml.jackson.core" name="jackson-databind" rev="${/com.fasterxml.jackson.core/jackson-databind}" conf="compile"/>
 
-    <!--
-    Included as part of Solr's environment.
-
-    com.google.guava:guava:jar:14.0.1:compile
-    commons-lang:commons-lang:jar:2.6:compile
-    -->
-
     <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test"/>
 
+    <!--
+    NOTE: There are dependencies that are part of core Solr server (jackson-core, HPPC, etc.).
+    -->
+
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
   </dependencies>
</ivy-module>
@@ -376,7 +376,7 @@ public class CarrotClusteringEngineTest extends AbstractClusteringTestCase {
     params.add(CarrotParams.SNIPPET_FIELD_NAME, "snippet");
 
     final List<String> labels = getLabels(checkEngine(
-        getClusteringEngine("custom-duplicating-tokenizer"), 1, 16, new TermQuery(new Term("title",
+        getClusteringEngine("custom-duplicating-tokenizer"), 1, 15, new TermQuery(new Term("title",
             "field")), params).get(0));
 
     // The custom test tokenizer duplicates each token's text
@@ -27,11 +27,11 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
+import com.carrotsearch.hppc.IntObjectHashMap;
+import com.carrotsearch.hppc.LongHashSet;
+import com.carrotsearch.hppc.LongObjectHashMap;
 import com.carrotsearch.hppc.LongObjectMap;
-import com.carrotsearch.hppc.LongObjectOpenHashMap;
-import com.carrotsearch.hppc.LongOpenHashSet;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.LongCursor;
 import com.carrotsearch.hppc.cursors.LongObjectCursor;
@@ -247,9 +247,9 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
     }
 
     FixedBitSet groupBits = null;
-    LongOpenHashSet groupSet = null;
+    LongHashSet groupSet = null;
     DocList docList = rb.getResults().docList;
-    IntOpenHashSet collapsedSet = new IntOpenHashSet(docList.size() * 2);
+    IntHashSet collapsedSet = new IntHashSet(docList.size() * 2);
 
     //Gather the groups for the current page of documents
     DocIterator idit = docList.iterator();
@@ -269,7 +269,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
     int currentContext = 0;
     int currentDocBase = contexts.get(currentContext).docBase;
     int nextDocBase = (currentContext+1)<contexts.size() ? contexts.get(currentContext+1).docBase : Integer.MAX_VALUE;
-    IntObjectOpenHashMap<BytesRef> ordBytes = null;
+    IntObjectHashMap<BytesRef> ordBytes = null;
     if(values != null) {
       groupBits = new FixedBitSet(values.getValueCount());
       MultiDocValues.OrdinalMap ordinalMap = null;
@@ -284,7 +284,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
       }
       int count = 0;
 
-      ordBytes = new IntObjectOpenHashMap();
+      ordBytes = new IntObjectHashMap<>();
 
       for(int i=0; i<globalDocs.length; i++) {
         int globalDoc = globalDocs[i];
@@ -329,7 +329,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
         }
       }
     } else {
-      groupSet = new LongOpenHashSet((int)(docList.size()*1.25));
+      groupSet = new LongHashSet(docList.size());
       NumericDocValues collapseValues = contexts.get(currentContext).reader().getNumericDocValues(field);
       int count = 0;
       for(int i=0; i<globalDocs.length; i++) {
@@ -389,12 +389,12 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
         .build();
       searcher.search(q, collector);
     }
-    LongObjectMap groups = ((GroupCollector)groupExpandCollector).getGroups();
+    LongObjectMap<Collector> groups = ((GroupCollector) groupExpandCollector).getGroups();
     NamedList outMap = new SimpleOrderedMap();
     CharsRefBuilder charsRef = new CharsRefBuilder();
-    for (LongObjectCursor cursor : (Iterable<LongObjectCursor>) groups) {
+    for (LongObjectCursor<Collector> cursor : groups) {
       long groupValue = cursor.key;
-      TopDocsCollector topDocsCollector = (TopDocsCollector) cursor.value;
+      TopDocsCollector<?> topDocsCollector = TopDocsCollector.class.cast(cursor.value);
       TopDocs topDocs = topDocsCollector.topDocs();
       ScoreDoc[] scoreDocs = topDocs.scoreDocs;
       if (scoreDocs.length > 0) {
@@ -502,11 +502,11 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
 
     private LongObjectMap<Collector> groups;
     private FixedBitSet groupBits;
-    private IntOpenHashSet collapsedSet;
+    private IntHashSet collapsedSet;
 
-    public GroupExpandCollector(SortedDocValues docValues, FixedBitSet groupBits, IntOpenHashSet collapsedSet, int limit, Sort sort) throws IOException {
+    public GroupExpandCollector(SortedDocValues docValues, FixedBitSet groupBits, IntHashSet collapsedSet, int limit, Sort sort) throws IOException {
       int numGroups = collapsedSet.size();
-      groups = new LongObjectOpenHashMap<>(numGroups * 2);
+      groups = new LongObjectHashMap<>(numGroups);
       DocIdSetIterator iterator = new BitSetIterator(groupBits, 0); // cost is not useful here
       int group;
       while ((group = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
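Note the sizing change above: new LongObjectHashMap<>(numGroups) rather than numGroups * 2. In HPPC 0.7 the constructor argument is the number of elements the container is expected to hold (internal buffers are sized from it with the load factor applied), so the old double-the-capacity fudge is unnecessary. A small sketch, assuming HPPC 0.7.x; names are illustrative:

import com.carrotsearch.hppc.LongObjectHashMap;

public class ExpectedElementsSketch {
  public static void main(String[] args) {
    // Hint of 4 expected elements; exceeding it just triggers an internal rehash.
    LongObjectHashMap<String> groups = new LongObjectHashMap<>(4);
    for (long g = 0; g < 100; g++) {
      groups.put(g, "group-" + g);
    }
    System.out.println(groups.size()); // 100
  }
}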
@@ -536,7 +536,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
       this.segmentOrdinalMap = ordinalMap.getGlobalOrds(context.ord);
     }
 
-    final LongObjectMap<LeafCollector> leafCollectors = new LongObjectOpenHashMap<>();
+    final LongObjectMap<LeafCollector> leafCollectors = new LongObjectHashMap<>();
     for (LongObjectCursor<Collector> entry : groups) {
       leafCollectors.put(entry.key, entry.value.getLeafCollector(context));
     }
@@ -578,17 +578,16 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
   private class NumericGroupExpandCollector implements Collector, GroupCollector {
     private NumericDocValues docValues;
 
-
     private String field;
-    private LongObjectOpenHashMap<Collector> groups;
+    private LongObjectHashMap<Collector> groups;
 
-    private IntOpenHashSet collapsedSet;
+    private IntHashSet collapsedSet;
     private long nullValue;
 
-    public NumericGroupExpandCollector(String field, long nullValue, LongOpenHashSet groupSet, IntOpenHashSet collapsedSet, int limit, Sort sort) throws IOException {
+    public NumericGroupExpandCollector(String field, long nullValue, LongHashSet groupSet, IntHashSet collapsedSet, int limit, Sort sort) throws IOException {
       int numGroups = collapsedSet.size();
       this.nullValue = nullValue;
-      groups = new LongObjectOpenHashMap(numGroups * 2);
+      groups = new LongObjectHashMap<>(numGroups);
       Iterator<LongCursor> iterator = groupSet.iterator();
       while (iterator.hasNext()) {
         LongCursor cursor = iterator.next();
@@ -609,7 +608,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
       final int docBase = context.docBase;
       this.docValues = context.reader().getNumericDocValues(this.field);
 
-      final LongObjectOpenHashMap<LeafCollector> leafCollectors = new LongObjectOpenHashMap<>();
+      final LongObjectHashMap<LeafCollector> leafCollectors = new LongObjectHashMap<>();
 
       for (LongObjectCursor<Collector> entry : groups) {
         leafCollectors.put(entry.key, entry.value.getLeafCollector(context));
@@ -627,29 +626,30 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
       @Override
       public void collect(int docId) throws IOException {
         long value = docValues.get(docId);
-        if (value != nullValue && leafCollectors.containsKey(value) && !collapsedSet.contains(docId + docBase)) {
-          LeafCollector c = leafCollectors.lget();
-          c.collect(docId);
+        final int index;
+        if (value != nullValue &&
+            (index = leafCollectors.indexOf(value)) >= 0 &&
+            !collapsedSet.contains(docId + docBase)) {
+          leafCollectors.indexGet(index).collect(docId);
         }
       }
     };
   }
 
-  public LongObjectOpenHashMap<Collector> getGroups() {
+  public LongObjectHashMap<Collector> getGroups() {
     return groups;
   }
 
 }
 
 private interface GroupCollector {
-  public LongObjectMap getGroups();
-
+  public LongObjectMap<Collector> getGroups();
 }
 
 private Query getGroupQuery(String fname,
                             FieldType ft,
                             int size,
-                            LongOpenHashSet groupSet) {
+                            LongHashSet groupSet) {
 
   BytesRef[] bytesRefs = new BytesRef[size];
   BytesRefBuilder term = new BytesRefBuilder();
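The rewrite above replaces the containsKey()/lget() pair, which relied on hidden state left behind by the last containsKey() call, with an explicit slot index: indexOf() probes the hash table once, and indexGet() then reads that slot directly. A sketch of the lookup pattern, assuming HPPC 0.7.x; the map and value types are illustrative:

import com.carrotsearch.hppc.LongObjectHashMap;

public class IndexLookupSketch {
  public static void main(String[] args) {
    LongObjectHashMap<StringBuilder> collectors = new LongObjectHashMap<>();
    collectors.put(5L, new StringBuilder());

    int index = collectors.indexOf(5L);   // single hash probe
    if (index >= 0) {                     // present; no second lookup needed
      collectors.indexGet(index).append('x');
    }
    System.out.println(collectors.get(5L)); // x
  }
}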
@@ -676,8 +676,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
 
   private Query getGroupQuery(String fname,
                               int size,
-                              IntObjectOpenHashMap<BytesRef> ordBytes) throws Exception {
-
+                              IntObjectHashMap<BytesRef> ordBytes) throws Exception {
     BytesRef[] bytesRefs = new BytesRef[size];
     int index = -1;
     Iterator<IntObjectCursor<BytesRef>>it = ordBytes.iterator();
@@ -728,7 +727,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
 
   public FieldInfos getFieldInfos() {
     Iterator<FieldInfo> it = in.getFieldInfos().iterator();
-    List<FieldInfo> newInfos = new ArrayList();
+    List<FieldInfo> newInfos = new ArrayList<>();
     while(it.hasNext()) {
       FieldInfo fieldInfo = it.next();
 
@@ -17,8 +17,6 @@
 
 package org.apache.solr.handler.component;
 
-import com.carrotsearch.hppc.IntIntOpenHashMap;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@@ -73,6 +71,8 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.InputSource;
 
+import com.carrotsearch.hppc.IntIntHashMap;
+
 import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpressionException;
@@ -536,16 +536,16 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
   }
 
 
-  public static IntIntOpenHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Map<BytesRef, Integer>boosted, Map context) throws IOException {
+  public static IntIntHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Map<BytesRef, Integer>boosted, Map context) throws IOException {
 
-    IntIntOpenHashMap boostDocs = null;
+    IntIntHashMap boostDocs = null;
 
     if(boosted != null) {
 
       //First see if it's already in the request context. Could have been put there
       //by another caller.
       if(context != null) {
-        boostDocs = (IntIntOpenHashMap)context.get(BOOSTED_DOCIDS);
+        boostDocs = (IntIntHashMap) context.get(BOOSTED_DOCIDS);
       }
 
       if(boostDocs != null) {
@@ -555,13 +555,13 @@ public class QueryElevationComponent extends SearchComponent implements SolrCore
 
       SchemaField idField = indexSearcher.getSchema().getUniqueKeyField();
       String fieldName = idField.getName();
-      HashSet<BytesRef> localBoosts = new HashSet(boosted.size()*2);
+      HashSet<BytesRef> localBoosts = new HashSet<>(boosted.size()*2);
       Iterator<BytesRef> boostedIt = boosted.keySet().iterator();
       while(boostedIt.hasNext()) {
         localBoosts.add(boostedIt.next());
       }
 
-      boostDocs = new IntIntOpenHashMap(boosted.size()*2);
+      boostDocs = new IntIntHashMap(boosted.size());
 
       List<LeafReaderContext>leaves = indexSearcher.getTopReaderContext().leaves();
       PostingsEnum postingsEnum = null;
@@ -20,6 +20,7 @@ package org.apache.solr.handler.component;
 import java.io.IOException;
 import java.util.*;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.queries.function.FunctionValues;
@@ -823,7 +824,6 @@ class StringStatsValues extends AbstractStatsValues<String> {
 
   @Override
   public long hash(String v) {
-    // NOTE: renamed hashUnencodedChars starting with guava 15
     return hasher.hashString(v).asLong();
   }
 
@@ -27,8 +27,8 @@ import java.util.Map;
 
 import com.carrotsearch.hppc.FloatArrayList;
 import com.carrotsearch.hppc.IntArrayList;
-import com.carrotsearch.hppc.IntIntOpenHashMap;
-import com.carrotsearch.hppc.IntLongOpenHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
+import com.carrotsearch.hppc.IntLongHashMap;
 import com.carrotsearch.hppc.cursors.IntIntCursor;
 import com.carrotsearch.hppc.cursors.IntLongCursor;
 import org.apache.lucene.index.DocValues;
@@ -228,8 +228,8 @@ public class CollapsingQParserPlugin extends QParserPlugin {
     }
   }
 
-  private IntIntOpenHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Map<BytesRef, Integer> boosted, Map context) throws IOException {
-    IntIntOpenHashMap boostDocs = QueryElevationComponent.getBoostDocs(indexSearcher, boosted, context);
+  private IntIntHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Map<BytesRef, Integer> boosted, Map context) throws IOException {
+    IntIntHashMap boostDocs = QueryElevationComponent.getBoostDocs(indexSearcher, boosted, context);
     return boostDocs;
   }
 
@@ -242,7 +242,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
     //We have to deal with it here rather then the constructor because
     //because the QueryElevationComponent runs after the Queries are constructed.
 
-    IntIntOpenHashMap boostDocsMap = null;
+    IntIntHashMap boostDocsMap = null;
     Map context = null;
     SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
     if(info != null) {
@@ -413,7 +413,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                              int segments,
                              SortedDocValues collapseValues,
                              int nullPolicy,
-                             IntIntOpenHashMap boostDocsMap) {
+                             IntIntHashMap boostDocsMap) {
       this.maxDoc = maxDoc;
       this.contexts = new LeafReaderContext[segments];
       this.collapsedSet = new FixedBitSet(maxDoc);
@@ -608,7 +608,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
     private LeafReaderContext[] contexts;
     private FixedBitSet collapsedSet;
     private NumericDocValues collapseValues;
-    private IntLongOpenHashMap cmap;
+    private IntLongHashMap cmap;
     private int maxDoc;
     private int nullPolicy;
     private float nullScore = -Float.MAX_VALUE;
@@ -627,7 +627,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                              int nullPolicy,
                              int size,
                              String field,
-                             IntIntOpenHashMap boostDocsMap) {
+                             IntIntHashMap boostDocsMap) {
       this.maxDoc = maxDoc;
       this.contexts = new LeafReaderContext[segments];
       this.collapsedSet = new FixedBitSet(maxDoc);
@@ -636,7 +636,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
       if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
         nullScores = new FloatArrayList();
       }
-      this.cmap = new IntLongOpenHashMap(size);
+      this.cmap = new IntLongHashMap(size);
       this.field = field;
 
       if(boostDocsMap != null) {
@@ -680,18 +680,19 @@ public class CollapsingQParserPlugin extends QParserPlugin {
 
       if(collapseValue != nullValue) {
         float score = scorer.score();
-        if(cmap.containsKey(collapseValue)) {
-          long scoreDoc = cmap.lget();
+        final int idx;
+        if((idx = cmap.indexOf(collapseValue)) >= 0) {
+          long scoreDoc = cmap.indexGet(idx);
           int testScore = (int)(scoreDoc>>32);
           int currentScore = Float.floatToRawIntBits(score);
           if(currentScore > testScore) {
             //Current score is higher so replace the old scoreDoc with the current scoreDoc
-            cmap.lset((((long)currentScore)<<32)+globalDoc);
+            cmap.indexReplace(idx, (((long)currentScore)<<32)+globalDoc);
           }
         } else {
           //Combine the score and document into a long.
           long scoreDoc = (((long)Float.floatToRawIntBits(score))<<32)+globalDoc;
-          cmap.put(collapseValue, scoreDoc);
+          cmap.indexInsert(idx, collapseValue, scoreDoc);
         }
       } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
         float score = scorer.score();
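This hunk shows the full replacement for the removed lget()/lset() API: indexOf() returns a non-negative slot for a present key or a negative insertion point for an absent one, indexGet()/indexReplace() operate on the occupied slot, and indexInsert() files a new entry into the free slot identified by the failed lookup, so only one hash probe is paid either way. A self-contained sketch of the same upsert, assuming HPPC 0.7.x; keys and values are illustrative:

import com.carrotsearch.hppc.IntLongHashMap;

public class IndexUpsertSketch {
  public static void main(String[] args) {
    IntLongHashMap cmap = new IntLongHashMap();
    int collapseValue = 7;
    long scoreDoc = 123L;

    final int idx = cmap.indexOf(collapseValue);
    if (idx >= 0) {
      cmap.indexReplace(idx, Math.max(cmap.indexGet(idx), scoreDoc)); // update in place
    } else {
      cmap.indexInsert(idx, collapseValue, scoreDoc);                 // insert at the free slot
    }
    System.out.println(cmap); // [7=>123]
  }
}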
@@ -807,7 +808,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                            boolean max,
                            boolean needsScores,
                            FieldType fieldType,
-                           IntIntOpenHashMap boostDocs,
+                           IntIntHashMap boostDocs,
                            FunctionQuery funcQuery, IndexSearcher searcher) throws IOException{
 
       this.maxDoc = maxDoc;
@@ -975,7 +976,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                               boolean max,
                               boolean needsScores,
                               FieldType fieldType,
-                              IntIntOpenHashMap boostDocsMap,
+                              IntIntHashMap boostDocsMap,
                               FunctionQuery funcQuery,
                               IndexSearcher searcher) throws IOException{
 
@@ -1035,7 +1036,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
       DocIdSetIterator it = new BitSetIterator(collapseStrategy.getCollapsedSet(), 0); // cost is not useful here
       int globalDoc = -1;
       int nullScoreIndex = 0;
-      IntIntOpenHashMap cmap = collapseStrategy.getCollapseMap();
+      IntIntHashMap cmap = collapseStrategy.getCollapseMap();
       int[] docs = collapseStrategy.getDocs();
       float[] scores = collapseStrategy.getScores();
       FloatArrayList nullScores = collapseStrategy.getNullScores();
@@ -1090,7 +1091,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                                       String hint,
                                       boolean needsScores,
                                       int size,
-                                      IntIntOpenHashMap boostDocs,
+                                      IntIntHashMap boostDocs,
                                       SolrIndexSearcher searcher) throws IOException {
 
 
@@ -1285,7 +1286,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                                   int nullPolicy,
                                   boolean max,
                                   boolean needsScores,
-                                  IntIntOpenHashMap boostDocsMap,
+                                  IntIntHashMap boostDocsMap,
                                   SortedDocValues values) {
       this.field = field;
       this.nullPolicy = nullPolicy;
@@ -1376,7 +1377,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                               int[] ords,
                               boolean max,
                               boolean needsScores,
-                              IntIntOpenHashMap boostDocs,
+                              IntIntHashMap boostDocs,
                               SortedDocValues values) throws IOException {
       super(maxDoc, field, nullPolicy, max, needsScores, boostDocs, values);
       this.ords = ords;
@@ -1457,7 +1458,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                                int[] ords,
                                boolean max,
                                boolean needsScores,
-                               IntIntOpenHashMap boostDocs,
+                               IntIntHashMap boostDocs,
                                SortedDocValues values) throws IOException {
       super(maxDoc, field, nullPolicy, max, needsScores, boostDocs, values);
       this.ords = ords;
@@ -1539,7 +1540,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                              int[] ords,
                              boolean max,
                              boolean needsScores,
-                             IntIntOpenHashMap boostDocs, SortedDocValues values) throws IOException {
+                             IntIntHashMap boostDocs, SortedDocValues values) throws IOException {
       super(maxDoc, field, nullPolicy, max, needsScores, boostDocs, values);
       this.ords = ords;
       this.ordVals = new long[ords.length];
@@ -1623,7 +1624,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                              int[] ords,
                              boolean max,
                              boolean needsScores,
-                             IntIntOpenHashMap boostDocs,
+                             IntIntHashMap boostDocs,
                              FunctionQuery funcQuery,
                              IndexSearcher searcher,
                              SortedDocValues values) throws IOException {
@@ -1707,7 +1708,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
 
   private abstract class IntFieldValueStrategy {
     protected int nullPolicy;
-    protected IntIntOpenHashMap cmap;
+    protected IntIntHashMap cmap;
     protected Scorer scorer;
     protected FloatArrayList nullScores;
     protected float nullScore;
@@ -1736,7 +1737,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                                  int nullPolicy,
                                  boolean max,
                                  boolean needsScores,
-                                 IntIntOpenHashMap boostDocsMap) {
+                                 IntIntHashMap boostDocsMap) {
       this.field = field;
       this.collapseField = collapseField;
       this.nullValue = nullValue;
@@ -1744,7 +1745,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
       this.max = max;
       this.needsScores = needsScores;
       this.collapsedSet = new FixedBitSet(maxDoc);
-      this.cmap = new IntIntOpenHashMap(size);
+      this.cmap = new IntIntHashMap(size);
       if(boostDocsMap != null) {
         this.boosts = true;
         this.boostDocs = new IntArrayList();
@@ -1801,7 +1802,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
       return nullScores;
     }
 
-    public IntIntOpenHashMap getCollapseMap() {
+    public IntIntHashMap getCollapseMap() {
       return cmap;
     }
 
@@ -1842,7 +1843,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                                 int nullPolicy,
                                 boolean max,
                                 boolean needsScores,
-                                IntIntOpenHashMap boostDocs) throws IOException {
+                                IntIntHashMap boostDocs) throws IOException {
 
       super(maxDoc, size, collapseField, field, nullValue, nullPolicy, max, needsScores, boostDocs);
 
@@ -1881,8 +1882,9 @@ public class CollapsingQParserPlugin extends QParserPlugin {
       int currentVal = (int) minMaxVals.get(contextDoc);
 
       if(collapseKey != nullValue) {
-        if(cmap.containsKey(collapseKey)) {
-          int pointer = cmap.lget();
+        final int idx;
+        if((idx = cmap.indexOf(collapseKey)) >= 0) {
+          int pointer = cmap.indexGet(idx);
           if(comp.test(currentVal, testValues[pointer])) {
             testValues[pointer]= currentVal;
             docs[pointer] = globalDoc;
@@ -1942,7 +1944,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                                   int nullPolicy,
                                   boolean max,
                                   boolean needsScores,
-                                  IntIntOpenHashMap boostDocs) throws IOException {
+                                  IntIntHashMap boostDocs) throws IOException {
 
       super(maxDoc, size, collapseField, field, nullValue, nullPolicy, max, needsScores, boostDocs);
 
@@ -1982,8 +1984,9 @@ public class CollapsingQParserPlugin extends QParserPlugin {
       float currentVal = Float.intBitsToFloat(minMaxVal);
 
       if(collapseKey != nullValue) {
-        if(cmap.containsKey(collapseKey)) {
-          int pointer = cmap.lget();
+        final int idx;
+        if((idx = cmap.indexOf(collapseKey)) >= 0) {
+          int pointer = cmap.indexGet(idx);
           if(comp.test(currentVal, testValues[pointer])) {
             testValues[pointer] = currentVal;
             docs[pointer] = globalDoc;
@@ -2054,7 +2057,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
                                    int nullPolicy,
                                    boolean max,
                                    boolean needsScores,
-                                   IntIntOpenHashMap boostDocs,
+                                   IntIntHashMap boostDocs,
                                    FunctionQuery funcQuery,
                                    IndexSearcher searcher) throws IOException {
 
@@ -2108,8 +2111,9 @@ public class CollapsingQParserPlugin extends QParserPlugin {
       float currentVal = functionValues.floatVal(contextDoc);
 
       if(collapseKey != nullValue) {
-        if(cmap.containsKey(collapseKey)) {
-          int pointer = cmap.lget();
+        final int idx;
+        if((idx = cmap.indexOf(collapseKey)) >= 0) {
+          int pointer = cmap.indexGet(idx);
           if(comp.test(currentVal, testValues[pointer])) {
             testValues[pointer] = currentVal;
             docs[pointer] = globalDoc;
@@ -23,8 +23,9 @@ import java.util.Comparator;
 import java.util.Map;
 import java.util.Set;
 
-import com.carrotsearch.hppc.IntFloatOpenHashMap;
-import com.carrotsearch.hppc.IntIntOpenHashMap;
+import com.carrotsearch.hppc.IntFloatHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
+
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
@@ -42,7 +43,6 @@ import org.apache.lucene.search.TopDocsCollector;
 import org.apache.lucene.search.TopFieldCollector;
 import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.search.Weight;
-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CommonParams;
@@ -292,7 +292,7 @@ public class ReRankQParserPlugin extends QParserPlugin {
         requestContext = info.getReq().getContext();
       }
 
      IntIntOpenHashMap boostedDocs = QueryElevationComponent.getBoostDocs((SolrIndexSearcher)searcher, boostedPriority, requestContext);
-      IntIntOpenHashMap boostedDocs = QueryElevationComponent.getBoostDocs((SolrIndexSearcher)searcher, boostedPriority, requestContext);
+      IntIntHashMap boostedDocs = QueryElevationComponent.getBoostDocs((SolrIndexSearcher)searcher, boostedPriority, requestContext);
 
       ScoreDoc[] mainScoreDocs = mainDocs.scoreDocs;
       ScoreDoc[] reRankScoreDocs = new ScoreDoc[Math.min(mainScoreDocs.length, reRankDocs)];
@@ -391,14 +391,15 @@ public class ReRankQParserPlugin extends QParserPlugin {
   }
 
   public class BoostedComp implements Comparator {
-    IntFloatOpenHashMap boostedMap;
+    IntFloatHashMap boostedMap;
 
-    public BoostedComp(IntIntOpenHashMap boostedDocs, ScoreDoc[] scoreDocs, float maxScore) {
-      this.boostedMap = new IntFloatOpenHashMap(boostedDocs.size()*2);
+    public BoostedComp(IntIntHashMap boostedDocs, ScoreDoc[] scoreDocs, float maxScore) {
+      this.boostedMap = new IntFloatHashMap(boostedDocs.size()*2);
 
       for(int i=0; i<scoreDocs.length; i++) {
-        if(boostedDocs.containsKey(scoreDocs[i].doc)) {
-          boostedMap.put(scoreDocs[i].doc, maxScore+boostedDocs.lget());
+        final int idx;
+        if((idx = boostedDocs.indexOf(scoreDocs[i].doc)) >= 0) {
+          boostedMap.put(scoreDocs[i].doc, maxScore+boostedDocs.indexGet(idx));
         } else {
          break;
           break;
         }
@@ -410,21 +411,16 @@ public class ReRankQParserPlugin extends QParserPlugin {
       ScoreDoc doc2 = (ScoreDoc) o2;
       float score1 = doc1.score;
       float score2 = doc2.score;
-      if(boostedMap.containsKey(doc1.doc)) {
-        score1 = boostedMap.lget();
+      int idx;
+      if((idx = boostedMap.indexOf(doc1.doc)) >= 0) {
+        score1 = boostedMap.indexGet(idx);
       }
 
-      if(boostedMap.containsKey(doc2.doc)) {
-        score2 = boostedMap.lget();
+      if((idx = boostedMap.indexOf(doc2.doc)) >= 0) {
+        score2 = boostedMap.indexGet(idx);
       }
 
-      if(score1 > score2) {
-        return -1;
-      } else if(score1 < score2) {
-        return 1;
-      } else {
-        return 0;
-      }
+      return -Float.compare(score1, score2);
     }
   }
 }
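The hand-rolled three-way comparison above collapses to a single -Float.compare(score1, score2), which yields the same descending order (in the JDK implementation Float.compare returns only -1, 0, or 1, so the negation cannot overflow) and additionally gives NaN and signed-zero values a consistent total order. A sketch with illustrative names only:

import java.util.Arrays;

public class DescendingScoreSketch {
  public static void main(String[] args) {
    Float[] scores = {1.0f, 3.0f, 2.0f};
    Arrays.sort(scores, (a, b) -> -Float.compare(a, b)); // highest score first
    System.out.println(Arrays.toString(scores)); // [3.0, 2.0, 1.0]
  }
}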
@@ -19,8 +19,8 @@ package org.apache.solr.util.hll;
 
 import java.util.Arrays;
 
-import com.carrotsearch.hppc.IntByteOpenHashMap;
-import com.carrotsearch.hppc.LongOpenHashSet;
+import com.carrotsearch.hppc.IntByteHashMap;
+import com.carrotsearch.hppc.LongHashSet;
 import com.carrotsearch.hppc.cursors.IntByteCursor;
 import com.carrotsearch.hppc.cursors.LongCursor;
 
@@ -69,9 +69,9 @@ public class HLL implements Cloneable {
     // ************************************************************************
     // Storage
     // storage used when #type is EXPLICIT, null otherwise
-    LongOpenHashSet explicitStorage;
+    LongHashSet explicitStorage;
     // storage used when #type is SPARSE, null otherwise
-    IntByteOpenHashMap sparseProbabilisticStorage;
+    IntByteHashMap sparseProbabilisticStorage;
     // storage used when #type is FULL, null otherwise
     BitVector probabilisticStorage;
 
@@ -398,8 +398,9 @@ public class HLL implements Cloneable {
         final int j = (int)(rawValue & mBitsMask);
 
         final byte currentValue;
-        if (sparseProbabilisticStorage.containsKey(j)) {
-          currentValue = sparseProbabilisticStorage.lget();
+        final int index = sparseProbabilisticStorage.indexOf(j);
+        if (index >= 0) {
+          currentValue = sparseProbabilisticStorage.indexGet(index);
         } else {
           currentValue = 0;
         }
@@ -467,10 +468,10 @@ public class HLL implements Cloneable {
                 // nothing to be done
                 break;
             case EXPLICIT:
-                this.explicitStorage = new LongOpenHashSet();
+                this.explicitStorage = new LongHashSet();
                 break;
             case SPARSE:
-                this.sparseProbabilisticStorage = new IntByteOpenHashMap();
+                this.sparseProbabilisticStorage = new IntByteHashMap();
                 break;
             case FULL:
                 this.probabilisticStorage = new BitVector(regwidth, m);
@@ -522,7 +523,7 @@ public class HLL implements Cloneable {
             for(int j=0; j<m; j++) {
                 final long register;
                 if (sparseProbabilisticStorage.containsKey(j)) {
-                    register = sparseProbabilisticStorage.lget();
+                    register = sparseProbabilisticStorage.get(j);
                 } else {
                     register = 0;
                 }
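The read path above also loses lget(): HPPC primitive maps return the value type's default (here (byte) 0) for an absent key, which is exactly the fallback this HLL code wants, so a plain get() works under the containsKey() guard. A sketch, assuming HPPC 0.7.x; register values are illustrative:

import com.carrotsearch.hppc.IntByteHashMap;

public class DefaultValueSketch {
  public static void main(String[] args) {
    IntByteHashMap registers = new IntByteHashMap();
    registers.put(7, (byte) 3);
    System.out.println(registers.get(7)); // 3
    System.out.println(registers.get(8)); // 0 -- absent keys return the default byte value
  }
}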
@@ -88,7 +88,6 @@ public class TestDistributedStatsComponentCardinality extends BaseDistributedSea
                   "long_l", ""+longValue,
                   "long_l_prehashed_l", ""+HASHER.hashLong(longValue).asLong(),
                   "string_s", strValue,
-                  // NOTE: renamed hashUnencodedChars starting with guava 15
                   "string_s_prehashed_l", ""+HASHER.hashString(strValue).asLong()));
 
       longValue -= BIG_PRIME;
@@ -22,7 +22,7 @@ import java.util.HashSet;
 import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
-import com.carrotsearch.hppc.LongOpenHashSet;
+import com.carrotsearch.hppc.LongHashSet;
 import static com.carrotsearch.randomizedtesting.RandomizedTest.*;
 
 
@@ -214,8 +214,8 @@ public class ExplicitHLLTest extends LuceneTestCase {
      * Asserts that values in both sets are exactly equal.
      */
     private static void assertElementsEqual(final HLL hllA, final HLL hllB) {
-        final LongOpenHashSet internalSetA = hllA.explicitStorage;
-        final LongOpenHashSet internalSetB = hllB.explicitStorage;
+        final LongHashSet internalSetA = hllA.explicitStorage;
+        final LongHashSet internalSetB = hllB.explicitStorage;
 
         assertTrue(internalSetA.equals(internalSetB));
     }
@@ -20,7 +20,7 @@ package org.apache.solr.util.hll;
 import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
-import com.carrotsearch.hppc.IntByteOpenHashMap;
+import com.carrotsearch.hppc.IntByteHashMap;
 import com.carrotsearch.hppc.cursors.IntByteCursor;
 import com.carrotsearch.randomizedtesting.RandomizedTest;
 
@@ -393,7 +393,7 @@ public class SparseHLLTest extends LuceneTestCase {
         for(int run=0; run<100; run++) {
             final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE);
 
-            final IntByteOpenHashMap map = new IntByteOpenHashMap();
+            final IntByteHashMap map = new IntByteHashMap();
 
             for(int i=0; i<sparseThreshold; i++) {
                 final long rawValue = RandomizedTest.randomLong();
@@ -423,7 +423,7 @@ public class SparseHLLTest extends LuceneTestCase {
     private static void assertRegisterPresent(final HLL hll,
                                               final int registerIndex,
                                               final int registerValue) {
-        final IntByteOpenHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage;
+        final IntByteHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage;
         assertEquals(sparseProbabilisticStorage.get(registerIndex), registerValue);
     }
 
@@ -433,7 +433,7 @@ public class SparseHLLTest extends LuceneTestCase {
     private static void assertOneRegisterSet(final HLL hll,
                                              final int registerIndex,
                                              final byte registerValue) {
-        final IntByteOpenHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage;
+        final IntByteHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage;
         assertEquals(sparseProbabilisticStorage.size(), 1);
         assertEquals(sparseProbabilisticStorage.get(registerIndex), registerValue);
     }
@ -442,8 +442,8 @@ public class SparseHLLTest extends LuceneTestCase {
|
||||||
* Asserts that all registers in the two {@link HLL} instances are identical.
|
* Asserts that all registers in the two {@link HLL} instances are identical.
|
||||||
*/
|
*/
|
||||||
private static void assertElementsEqual(final HLL hllA, final HLL hllB) {
|
private static void assertElementsEqual(final HLL hllA, final HLL hllB) {
|
||||||
final IntByteOpenHashMap sparseProbabilisticStorageA = hllA.sparseProbabilisticStorage;
|
final IntByteHashMap sparseProbabilisticStorageA = hllA.sparseProbabilisticStorage;
|
||||||
final IntByteOpenHashMap sparseProbabilisticStorageB = hllB.sparseProbabilisticStorage;
|
final IntByteHashMap sparseProbabilisticStorageB = hllB.sparseProbabilisticStorage;
|
||||||
assertEquals(sparseProbabilisticStorageA.size(), sparseProbabilisticStorageB.size());
|
assertEquals(sparseProbabilisticStorageA.size(), sparseProbabilisticStorageB.size());
|
||||||
for (IntByteCursor c : sparseProbabilisticStorageA) {
|
for (IntByteCursor c : sparseProbabilisticStorageA) {
|
||||||
assertEquals(sparseProbabilisticStorageA.get(c.key),
|
assertEquals(sparseProbabilisticStorageA.get(c.key),
|
||||||
|
|
|
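Side note on the rename in the hunks above: HPPC 0.7.x dropped the "Open" infix
from its primitive collection classes, so the 0.5.x IntByteOpenHashMap becomes
IntByteHashMap, while put()/get()/size() and cursor iteration keep the same shape.
A minimal standalone sketch of the renamed API as the test exercises it (the
wrapper class name and sample values are illustrative, not part of this commit):

    import com.carrotsearch.hppc.IntByteHashMap;
    import com.carrotsearch.hppc.cursors.IntByteCursor;

    public class HppcRenameSketch {
        public static void main(String[] args) {
            // Same map shape the test uses for sparse HLL register storage.
            final IntByteHashMap map = new IntByteHashMap();
            map.put(3, (byte) 1);        // register index -> register value
            final byte v = map.get(3);   // missing keys return the default value 0
            System.out.println(map.size() + " entry; register 3 = " + v);
            // Cursor-based iteration, as in assertElementsEqual(..) above.
            for (IntByteCursor c : map) {
                System.out.println(c.key + " -> " + c.value);
            }
        }
    }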
@@ -1 +0,0 @@
-d597da840c4f706a1b0bcf55935c6c30c0fbb5f2

@@ -0,0 +1 @@
+4dd69fb0bac1148a408d88a0e0ef5b92edcee70f

@@ -0,0 +1 @@
+539317dc171b8c92cca964e87686602800cf19b0
@@ -0,0 +1,202 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
@@ -0,0 +1,5 @@
+This product includes software developed by
+Google, Inc. (http://code.google.com/p/guava-libraries/)
+
+Repacked Carrot2 Guava at:
+https://github.com/carrot2/lib-repackaged
@@ -0,0 +1 @@
+49561bfc65428dff43b78d0c553b545a7dbd66fa

@@ -1 +0,0 @@
-8eafc39444cd8d80802b2712d52ae751f3007cad

@@ -1 +0,0 @@
-074bcc9d152a928a4ea9ac59a5b45850bf00cd4e

@@ -0,0 +1 @@
+8b5057f74ea378c0150a1860874a3ebdcb713767

@@ -0,0 +1 @@
+87100c6baf60f096b42b9af06dafeb20f686cd02

@@ -1 +0,0 @@
-fdf556c88d66f65440bd24024f55a52c227c0e3f

@@ -0,0 +1 @@
+0f8eeb58acb5a39e162c0d49fcf29a70744cc2bc

@@ -1 +0,0 @@
-e03b9feb39f6e2c0ac7c37e220d01cdae66d3a28

@@ -0,0 +1 @@
+a74ad7ceb29ff1d8194eb161f5b2dfbd636626a5

@@ -1 +0,0 @@
-c81d6c63e22e97819063cad7f1ecd20269cba720

@@ -0,0 +1 @@
+dd91fb744c2ff921407475cb29a1e3fee397d411

@@ -1 +0,0 @@
-48f90a787b2d59faab3b8c203945e4b0db32aec4