mirror of https://github.com/apache/lucene.git
LUCENE-3023: merged with trunk
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/realtime_search@1096120 13f79535-47bb-0310-9956-ffa450edef68
commit 990a5d6f3a

@@ -19,7 +19,7 @@
 <project name="lucene-solr" default="test" basedir=".">

   <import file="common-build.xml"/>

-  <target name="test" description="Test both Lucene and Solr">
+  <target name="test" description="Test both Lucene and Solr" depends="validate">
     <sequential>
       <subant target="test" inheritall="false" failonerror="true">
         <fileset dir="lucene" includes="build.xml" />
@@ -35,7 +35,7 @@
       <fileset dir="solr" includes="build.xml" />
     </subant></sequential>
   </target>

-  <target name="compile" depends="validate" description="Compile Lucene and Solr">
+  <target name="compile" description="Compile Lucene and Solr">
     <sequential>

       <subant target="compile" inheritall="false" failonerror="true">
@@ -387,8 +387,8 @@ Test Cases

 Build

-* LUCENE-3006: Building javadocs will fail on warnings by default. Override with -Dfailonjavadocwarning=false (sarowe, gsingers)
+* LUCENE-3006: Building javadocs will fail on warnings by default.
+  Override with -Dfailonjavadocwarning=false (sarowe, gsingers)

 ======================= Lucene 3.x (not yet released) =======================

@@ -410,6 +410,14 @@ Bug fixes
   seeking TermEnum (eg used by Solr's faceting) (Tom Burton-West, Mike
   McCandless)

+* LUCENE-3042: When a filter or consumer added Attributes to a TokenStream
+  chain after it was already (partly) consumed [or clearAttributes(),
+  captureState(), cloneAttributes(),... was called by the Tokenizer],
+  the Tokenizer calling clearAttributes() or capturing state after addition
+  may not do this on the newly added Attribute. This bug affected only
+  very special use cases of the TokenStream-API, most users would not
+  have recognized it. (Uwe Schindler, Robert Muir)
+
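As an illustrative aside (not part of this commit's diff), the LUCENE-3042 scenario can be seen with a minimal sketch along the lines of the test this commit adds to TestAttributeSource; the class and method names below are invented for illustration:

    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
    import org.apache.lucene.util.AttributeSource;

    public class Lucene3042Sketch {
      public static void main(String[] args) {
        AttributeSource src1 = new AttributeSource();
        src1.addAttribute(CharTermAttribute.class).append("foo");
        int hash1 = src1.hashCode();                 // triggers the cached State

        // A second source sharing src1's attributes (as a TokenFilter would)
        // adds another attribute after the state was already cached.
        AttributeSource src2 = new AttributeSource(src1);
        src2.addAttribute(TypeAttribute.class).setType("bar");

        // Before the fix src1 could keep using its stale cached State and miss
        // the new TypeAttribute; after the fix both sources agree again.
        System.out.println(hash1 != src1.hashCode());            // expected: true
        System.out.println(src1.hashCode() == src2.hashCode());  // expected: true
      }
    }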
 ======================= Lucene 3.1.0 =======================

 Changes in backwards compatibility policy
@@ -308,7 +308,7 @@
     </copy>
   </target>

-  <target name="compile" depends="compile-core, validate-lucene">
+  <target name="compile" depends="compile-core">
     <!-- convenience target to compile core -->
   </target>

@@ -565,7 +565,7 @@
     </sequential>
   </macrodef>

-  <target name="test" depends="compile-test,junit-mkdir,junit-sequential,junit-parallel" description="Runs unit tests"/>
+  <target name="test" depends="compile-test,validate-lucene,junit-mkdir,junit-sequential,junit-parallel" description="Runs unit tests"/>

   <target name="junit-mkdir">
     <mkdir dir="${junit.output.dir}"/>
@@ -45,10 +45,15 @@ API Changes

 ======================= Lucene 3.x (not yet released) =======================

-Bug fixes
+Bug Fixes

 * LUCENE-3026: SmartChineseAnalyzer's WordTokenFilter threw NullPointerException
   on sentences longer than 32,767 characters. (wangzhenghang via Robert Muir)

+* LUCENE-2939: Highlighter should try and use maxDocCharsToAnalyze in
+  WeightedSpanTermExtractor when adding a new field to MemoryIndex as well as
+  when using CachingTokenStream. This can be a significant performance bug for
+  large documents. (Mark Miller)
+
 New Features
@@ -0,0 +1,2 @@
+AnyObjectId[9a9ff077cdd36a96e7e0506986edd4e52b90a22f] was removed in git history.
+Apache SVN contains full history.
@@ -0,0 +1 @@
+No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking.
@@ -0,0 +1 @@
+No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking.
@@ -0,0 +1 @@
+No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking.
@@ -0,0 +1,2 @@
+AnyObjectId[99baf20bacd712cae91dd6e4e1f46224cafa1a37] was removed in git history.
+Apache SVN contains full history.
@@ -0,0 +1 @@
+No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking.
@@ -197,6 +197,11 @@ public class Highlighter
     tokenStream.reset();

     TextFragment currentFrag = new TextFragment(newText,newText.length(), docFrags.size());
+
+    if (fragmentScorer instanceof QueryScorer) {
+      ((QueryScorer) fragmentScorer).setMaxDocCharsToAnalyze(maxDocCharsToAnalyze);
+    }
+
     TokenStream newStream = fragmentScorer.init(tokenStream);
     if(newStream != null) {
       tokenStream = newStream;
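As a usage aside (not part of the diff), the new plumbing is driven by the existing Highlighter.setMaxDocCharsToAnalyze setting. A hedged sketch of a caller; the field name, fragment count and character limit are invented for illustration:

    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.highlight.Highlighter;
    import org.apache.lucene.search.highlight.QueryScorer;
    import org.apache.lucene.search.highlight.SimpleHTMLFormatter;

    public class HighlightSketch {
      // Highlights one stored field value; with this change the 50000-char limit
      // is also honored when the stream is cached and fed to MemoryIndex.
      static String[] highlight(Query query, TokenStream tokenStream, String text) throws Exception {
        QueryScorer scorer = new QueryScorer(query, "body");
        Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(), scorer);
        highlighter.setMaxDocCharsToAnalyze(50000);
        return highlighter.getBestFragments(tokenStream, text, 3);
      }
    }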
@@ -0,0 +1 @@
package org.apache.lucene.search.highlight;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
/**
* This TokenFilter limits the number of tokens while indexing by adding up the
* current offset.
*/
public final class OffsetLimitTokenFilter extends TokenFilter {
private int offsetCount;
private OffsetAttribute offsetAttrib = getAttribute(OffsetAttribute.class);
private int offsetLimit;
public OffsetLimitTokenFilter(TokenStream input, int offsetLimit) {
super(input);
this.offsetLimit = offsetLimit;
}
@Override
public boolean incrementToken() throws IOException {
if (offsetCount < offsetLimit && input.incrementToken()) {
int offsetLength = offsetAttrib.endOffset() - offsetAttrib.startOffset();
offsetCount += offsetLength;
return true;
}
return false;
}
@Override
public void reset() throws IOException {
super.reset();
offsetCount = 0;
}
}
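A small aside on the accounting in incrementToken (not part of the commit): the running total is checked before the next token is consumed, so the token that crosses the limit is still emitted and counting stops afterwards. With limit 10 over "short toolong evenmuchlongertext ..." (as in the test added later in this commit):

    offsetCount = 0   ->  0 < 10,  emit "short",   offsetCount becomes 5
    offsetCount = 5   ->  5 < 10,  emit "toolong", offsetCount becomes 12
    offsetCount = 12  -> 12 >= 10, stop            -> tokens {"short", "toolong"}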
@@ -54,6 +54,7 @@ public class QueryScorer implements Scorer {
   private IndexReader reader;
   private boolean skipInitExtractor;
   private boolean wrapToCaching = true;
+  private int maxCharsToAnalyze;

   /**
    * @param query Query to use for highlighting
@@ -209,7 +210,7 @@ public class QueryScorer implements Scorer {
   private TokenStream initExtractor(TokenStream tokenStream) throws IOException {
     WeightedSpanTermExtractor qse = defaultField == null ? new WeightedSpanTermExtractor()
         : new WeightedSpanTermExtractor(defaultField);
+    qse.setMaxDocCharsToAnalyze(maxCharsToAnalyze);
     qse.setExpandMultiTermQuery(expandMultiTermQuery);
     qse.setWrapIfNotCachingTokenFilter(wrapToCaching);
     if (reader == null) {
@@ -265,4 +266,8 @@ public class QueryScorer implements Scorer {
   public void setWrapIfNotCachingTokenFilter(boolean wrap) {
     this.wrapToCaching = wrap;
   }
+
+  public void setMaxDocCharsToAnalyze(int maxDocCharsToAnalyze) {
+    this.maxCharsToAnalyze = maxDocCharsToAnalyze;
+  }
 }
@@ -56,6 +56,7 @@ public class WeightedSpanTermExtractor {
   private boolean expandMultiTermQuery;
   private boolean cachedTokenStream;
   private boolean wrapToCaching = true;
+  private int maxDocCharsToAnalyze;

   public WeightedSpanTermExtractor() {
   }
@@ -320,13 +321,13 @@ public class WeightedSpanTermExtractor {

   private AtomicReaderContext getLeafContextForField(String field) throws IOException {
     if(wrapToCaching && !cachedTokenStream && !(tokenStream instanceof CachingTokenFilter)) {
-      tokenStream = new CachingTokenFilter(tokenStream);
+      tokenStream = new CachingTokenFilter(new OffsetLimitTokenFilter(tokenStream, maxDocCharsToAnalyze));
       cachedTokenStream = true;
     }
     AtomicReaderContext context = readers.get(field);
     if (context == null) {
       MemoryIndex indexer = new MemoryIndex();
-      indexer.addField(field, tokenStream);
+      indexer.addField(field, new OffsetLimitTokenFilter(tokenStream, maxDocCharsToAnalyze));
       tokenStream.reset();
       IndexSearcher searcher = indexer.createSearcher();
       // MEM index has only atomic ctx
@@ -545,4 +546,8 @@ public class WeightedSpanTermExtractor {
   public void setWrapIfNotCachingTokenFilter(boolean wrap) {
     this.wrapToCaching = wrap;
   }
+
+  protected final void setMaxDocCharsToAnalyze(int maxDocCharsToAnalyze) {
+    this.maxDocCharsToAnalyze = maxDocCharsToAnalyze;
+  }
 }
@@ -0,0 +1 @@
package org.apache.lucene.search.highlight;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.Reader;
import java.io.StringReader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
public class OffsetLimitTokenFilterTest extends BaseTokenStreamTestCase {
public void testFilter() throws Exception {
TokenStream stream = new MockTokenizer(new StringReader(
"short toolong evenmuchlongertext a ab toolong foo"),
MockTokenizer.WHITESPACE, false);
OffsetLimitTokenFilter filter = new OffsetLimitTokenFilter(stream, 10);
assertTokenStreamContents(filter, new String[] {"short", "toolong"});
stream = new MockTokenizer(new StringReader(
"short toolong evenmuchlongertext a ab toolong foo"),
MockTokenizer.WHITESPACE, false);
filter = new OffsetLimitTokenFilter(stream, 12);
assertTokenStreamContents(filter, new String[] {"short", "toolong"});
stream = new MockTokenizer(new StringReader(
"short toolong evenmuchlongertext a ab toolong foo"),
MockTokenizer.WHITESPACE, false);
filter = new OffsetLimitTokenFilter(stream, 30);
assertTokenStreamContents(filter, new String[] {"short", "toolong",
"evenmuchlongertext"});
checkOneTermReuse(new Analyzer() {
@Override
public TokenStream tokenStream(String fieldName, Reader reader) {
return new OffsetLimitTokenFilter(new MockTokenizer(reader,
MockTokenizer.WHITESPACE, false), 10);
}
}, "llenges", "llenges");
}
}
@@ -184,9 +184,21 @@ public abstract class MultiLevelSkipListReader {
     }
   }

+  /** returns x == 0 ? 0 : Math.floor(Math.log(x) / Math.log(base)) */
+  static int log(int x, int base) {
+    assert base >= 2;
+    int ret = 0;
+    long n = base; // needs to be a long to avoid overflow
+    while (x >= n) {
+      n *= base;
+      ret++;
+    }
+    return ret;
+  }
+
   /** Loads the skip levels */
   private void loadSkipLevels() throws IOException {
-    numberOfSkipLevels = docCount == 0 ? 0 : (int) Math.floor(Math.log(docCount) / Math.log(skipInterval[0]));
+    numberOfSkipLevels = log(docCount, skipInterval[0]);
     if (numberOfSkipLevels > maxNumberOfSkipLevels) {
       numberOfSkipLevels = maxNumberOfSkipLevels;
     }
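As an aside (not part of the patch), the reason for replacing the Math.log ratio with an integer loop: the double division can land just below a whole number, and floor() then undercounts. A self-contained sketch; the values 1000 and 10 are only an illustration, not what the skip lists actually use:

    public class IntLogDemo {
      static int log(int x, int base) {            // same approach as the new log() above
        int ret = 0;
        long n = base;                             // long to avoid overflow
        while (x >= n) { n *= base; ret++; }
        return ret;
      }

      public static void main(String[] args) {
        int x = 1000, base = 10;
        int viaDouble = (int) Math.floor(Math.log(x) / Math.log(base));
        System.out.println(viaDouble);             // typically prints 2 (the ratio is 2.999... and rounds down)
        System.out.println(log(x, base));          // prints 3
      }
    }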
@@ -61,7 +61,7 @@ public abstract class MultiLevelSkipListWriter {
     this.skipInterval = skipInterval;

     // calculate the maximum number of skip levels for this document frequency
-    numberOfSkipLevels = df == 0 ? 0 : (int) Math.floor(Math.log(df) / Math.log(skipInterval));
+    numberOfSkipLevels = MultiLevelSkipListReader.log(df, skipInterval);

     // make sure it does not exceed maxSkipLevels
     if (numberOfSkipLevels > maxSkipLevels) {
|
@@ -93,10 +93,33 @@ public class AttributeSource {
     }
   }

+  /**
+   * This class holds the state of an AttributeSource.
+   * @see #captureState
+   * @see #restoreState
+   */
+  public static final class State implements Cloneable {
+    AttributeImpl attribute;
+    State next;
+
+    @Override
+    public Object clone() {
+      State clone = new State();
+      clone.attribute = (AttributeImpl) attribute.clone();
+
+      if (next != null) {
+        clone.next = (State) next.clone();
+      }
+
+      return clone;
+    }
+  }
+
   // These two maps must always be in sync!!!
   // So they are private, final and read-only from the outside (read-only iterators)
   private final Map<Class<? extends Attribute>, AttributeImpl> attributes;
   private final Map<Class<? extends AttributeImpl>, AttributeImpl> attributeImpls;
+  private final State[] currentState;

   private AttributeFactory factory;
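An explanatory aside (not from the diff): currentState becomes a one-element array rather than a plain field so that every AttributeSource built from another one, which already shares the attribute maps, also shares the cached-state slot; invalidating it in one instance is then seen by all of them, which is what the LUCENE-3042 fix needs. A minimal, hypothetical sketch of that idea:

    // Sketch only: copies share one invalidatable cache slot.
    class View {
      final Object[] cachedState;                        // same array instance in every copy
      View() { this.cachedState = new Object[1]; }
      View(View other) { this.cachedState = other.cachedState; }
      void invalidate() { cachedState[0] = null; }       // visible to all copies
      Object cached() { return cachedState[0]; }
    }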
@ -116,6 +139,7 @@ public class AttributeSource {
|
|||
}
|
||||
this.attributes = input.attributes;
|
||||
this.attributeImpls = input.attributeImpls;
|
||||
this.currentState = input.currentState;
|
||||
this.factory = input.factory;
|
||||
}
|
||||
|
||||
|
@ -125,6 +149,7 @@ public class AttributeSource {
|
|||
public AttributeSource(AttributeFactory factory) {
|
||||
this.attributes = new LinkedHashMap<Class<? extends Attribute>, AttributeImpl>();
|
||||
this.attributeImpls = new LinkedHashMap<Class<? extends AttributeImpl>, AttributeImpl>();
|
||||
this.currentState = new State[1];
|
||||
this.factory = factory;
|
||||
}
|
||||
|
||||
|
@ -147,11 +172,8 @@ public class AttributeSource {
|
|||
* if one instance implements more than one Attribute interface.
|
||||
*/
|
||||
public final Iterator<AttributeImpl> getAttributeImplsIterator() {
|
||||
if (hasAttributes()) {
|
||||
if (currentState == null) {
|
||||
computeCurrentState();
|
||||
}
|
||||
final State initState = currentState;
|
||||
final State initState = getCurrentState();
|
||||
if (initState != null) {
|
||||
return new Iterator<AttributeImpl>() {
|
||||
private State state = initState;
|
||||
|
||||
|
@ -225,7 +247,7 @@ public class AttributeSource {
|
|||
// Attribute is a superclass of this interface
|
||||
if (!attributes.containsKey(curInterface)) {
|
||||
// invalidate state to force recomputation in captureState()
|
||||
this.currentState = null;
|
||||
this.currentState[0] = null;
|
||||
attributes.put(curInterface, att);
|
||||
attributeImpls.put(clazz, att);
|
||||
}
|
||||
|
@@ -283,41 +305,21 @@ public class AttributeSource {
     }
     return attClass.cast(attImpl);
   }

-  /**
-   * This class holds the state of an AttributeSource.
-   * @see #captureState
-   * @see #restoreState
-   */
-  public static final class State implements Cloneable {
-    AttributeImpl attribute;
-    State next;
-
-    @Override
-    public Object clone() {
-      State clone = new State();
-      clone.attribute = (AttributeImpl) attribute.clone();
-
-      if (next != null) {
-        clone.next = (State) next.clone();
-      }
-
-      return clone;
-    }
-  }
-
-  private State currentState = null;
-
-  private void computeCurrentState() {
-    currentState = new State();
-    State c = currentState;
+  private State getCurrentState() {
+    State s = currentState[0];
+    if (s != null || !hasAttributes()) {
+      return s;
+    }
+    State c = s = currentState[0] = new State();
     final Iterator<AttributeImpl> it = attributeImpls.values().iterator();
     c.attribute = it.next();
     while (it.hasNext()) {
       c.next = new State();
       c = c.next;
       c.attribute = it.next();
     }
+    return s;
   }
||||
|
||||
/**
|
||||
|
@ -325,13 +327,8 @@ public class AttributeSource {
|
|||
* {@link AttributeImpl#clear()} on each Attribute implementation.
|
||||
*/
|
||||
public final void clearAttributes() {
|
||||
if (hasAttributes()) {
|
||||
if (currentState == null) {
|
||||
computeCurrentState();
|
||||
}
|
||||
for (State state = currentState; state != null; state = state.next) {
|
||||
state.attribute.clear();
|
||||
}
|
||||
for (State state = getCurrentState(); state != null; state = state.next) {
|
||||
state.attribute.clear();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -340,14 +337,8 @@ public class AttributeSource {
|
|||
* {@link #restoreState} to restore the state of this or another AttributeSource.
|
||||
*/
|
||||
public final State captureState() {
|
||||
if (!hasAttributes()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (currentState == null) {
|
||||
computeCurrentState();
|
||||
}
|
||||
return (State) this.currentState.clone();
|
||||
final State state = this.getCurrentState();
|
||||
return (state == null) ? null : (State) state.clone();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -382,15 +373,9 @@ public class AttributeSource {
|
|||
@Override
|
||||
public int hashCode() {
|
||||
int code = 0;
|
||||
if (hasAttributes()) {
|
||||
if (currentState == null) {
|
||||
computeCurrentState();
|
||||
}
|
||||
for (State state = currentState; state != null; state = state.next) {
|
||||
code = code * 31 + state.attribute.hashCode();
|
||||
}
|
||||
for (State state = getCurrentState(); state != null; state = state.next) {
|
||||
code = code * 31 + state.attribute.hashCode();
|
||||
}
|
||||
|
||||
return code;
|
||||
}
|
||||
|
||||
|
@ -413,14 +398,8 @@ public class AttributeSource {
|
|||
}
|
||||
|
||||
// it is only equal if all attribute impls are the same in the same order
|
||||
if (this.currentState == null) {
|
||||
this.computeCurrentState();
|
||||
}
|
||||
State thisState = this.currentState;
|
||||
if (other.currentState == null) {
|
||||
other.computeCurrentState();
|
||||
}
|
||||
State otherState = other.currentState;
|
||||
State thisState = this.getCurrentState();
|
||||
State otherState = other.getCurrentState();
|
||||
while (thisState != null && otherState != null) {
|
||||
if (otherState.attribute.getClass() != thisState.attribute.getClass() || !otherState.attribute.equals(thisState.attribute)) {
|
||||
return false;
|
||||
|
@ -473,13 +452,8 @@ public class AttributeSource {
|
|||
* @see AttributeImpl#reflectWith
|
||||
*/
|
||||
public final void reflectWith(AttributeReflector reflector) {
|
||||
if (hasAttributes()) {
|
||||
if (currentState == null) {
|
||||
computeCurrentState();
|
||||
}
|
||||
for (State state = currentState; state != null; state = state.next) {
|
||||
state.attribute.reflectWith(reflector);
|
||||
}
|
||||
for (State state = getCurrentState(); state != null; state = state.next) {
|
||||
state.attribute.reflectWith(reflector);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -495,10 +469,7 @@ public class AttributeSource {
|
|||
|
||||
if (hasAttributes()) {
|
||||
// first clone the impls
|
||||
if (currentState == null) {
|
||||
computeCurrentState();
|
||||
}
|
||||
for (State state = currentState; state != null; state = state.next) {
|
||||
for (State state = getCurrentState(); state != null; state = state.next) {
|
||||
clone.attributeImpls.put(state.attribute.getClass(), (AttributeImpl) state.attribute.clone());
|
||||
}
|
||||
|
||||
|
@ -520,18 +491,13 @@ public class AttributeSource {
|
|||
* {@link #cloneAttributes} instead of {@link #captureState}.
|
||||
*/
|
||||
public final void copyTo(AttributeSource target) {
|
||||
if (hasAttributes()) {
|
||||
if (currentState == null) {
|
||||
computeCurrentState();
|
||||
}
|
||||
for (State state = currentState; state != null; state = state.next) {
|
||||
final AttributeImpl targetImpl = target.attributeImpls.get(state.attribute.getClass());
|
||||
if (targetImpl == null) {
|
||||
throw new IllegalArgumentException("This AttributeSource contains AttributeImpl of type " +
|
||||
state.attribute.getClass().getName() + " that is not in the target");
|
||||
}
|
||||
state.attribute.copyTo(targetImpl);
|
||||
for (State state = getCurrentState(); state != null; state = state.next) {
|
||||
final AttributeImpl targetImpl = target.attributeImpls.get(state.attribute.getClass());
|
||||
if (targetImpl == null) {
|
||||
throw new IllegalArgumentException("This AttributeSource contains AttributeImpl of type " +
|
||||
state.attribute.getClass().getName() + " that is not in the target");
|
||||
}
|
||||
state.attribute.copyTo(targetImpl);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -117,11 +117,24 @@ public abstract class BaseTokenStreamTestCase extends LuceneTestCase {
|
|||
assertEquals("type "+i, types[i], typeAtt.type());
|
||||
if (posIncrements != null)
|
||||
assertEquals("posIncrement "+i, posIncrements[i], posIncrAtt.getPositionIncrement());
|
||||
|
||||
// we can enforce some basic things about a few attributes even if the caller doesn't check:
|
||||
if (offsetAtt != null) {
|
||||
assertTrue("startOffset must be >= 0", offsetAtt.startOffset() >= 0);
|
||||
assertTrue("endOffset must be >= 0", offsetAtt.endOffset() >= 0);
|
||||
assertTrue("endOffset must be >= startOffset", offsetAtt.endOffset() >= offsetAtt.startOffset());
|
||||
}
|
||||
if (posIncrAtt != null) {
|
||||
assertTrue("posIncrement must be >= 0", posIncrAtt.getPositionIncrement() >= 0);
|
||||
}
|
||||
}
|
||||
assertFalse("end of stream", ts.incrementToken());
|
||||
ts.end();
|
||||
if (finalOffset != null)
|
||||
assertEquals("finalOffset ", finalOffset.intValue(), offsetAtt.endOffset());
|
||||
if (offsetAtt != null) {
|
||||
assertTrue("finalOffset must be >= 0", offsetAtt.endOffset() >= 0);
|
||||
}
|
||||
ts.close();
|
||||
}
|
||||
|
||||
|
|
|
@ -122,7 +122,7 @@ public class MockRandomCodec extends Codec {
|
|||
public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
|
||||
// we pull this before the seed intentionally: because its not consumed at runtime
|
||||
// (the skipInterval is written into postings header)
|
||||
int skipInterval = _TestUtil.nextInt(seedRandom, 2, 64);
|
||||
int skipInterval = _TestUtil.nextInt(seedRandom, 2, 10);
|
||||
|
||||
if (LuceneTestCase.VERBOSE) {
|
||||
System.out.println("MockRandomCodec: skipInterval=" + skipInterval);
|
||||
|
|
|
@ -128,6 +128,9 @@ public abstract class LuceneTestCase extends Assert {
|
|||
TEMP_DIR = new File(s);
|
||||
TEMP_DIR.mkdirs();
|
||||
}
|
||||
|
||||
/** set of directories we created, in afterclass we try to clean these up */
|
||||
static final Set<String> tempDirs = Collections.synchronizedSet(new HashSet<String>());
|
||||
|
||||
// by default we randomly pick a different codec for
|
||||
// each test case (non-J4 tests) and each test class (J4
|
||||
|
@ -323,6 +326,7 @@ public abstract class LuceneTestCase extends Assert {
|
|||
public static void beforeClassLuceneTestCaseJ4() {
|
||||
staticSeed = "random".equals(TEST_SEED) ? seedRand.nextLong() : TwoLongs.fromString(TEST_SEED).l1;
|
||||
random.setSeed(staticSeed);
|
||||
tempDirs.clear();
|
||||
stores = Collections.synchronizedMap(new IdentityHashMap<MockDirectoryWrapper,StackTraceElement[]>());
|
||||
savedCodecProvider = CodecProvider.getDefault();
|
||||
if ("randomPerField".equals(TEST_CODEC)) {
|
||||
|
@ -411,6 +415,16 @@ public abstract class LuceneTestCase extends Assert {
|
|||
+ "free=" + Runtime.getRuntime().freeMemory() + ","
|
||||
+ "total=" + Runtime.getRuntime().totalMemory());
|
||||
}
|
||||
// clear out any temp directories if we can
|
||||
if (!testsFailed) {
|
||||
for (String path : tempDirs) {
|
||||
try {
|
||||
_TestUtil.rmDir(new File(path));
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean testsFailed; /* true if any tests failed */
|
||||
|
@ -1058,6 +1072,7 @@ public abstract class LuceneTestCase extends Assert {
|
|||
final File tmpFile = File.createTempFile("test", "tmp", TEMP_DIR);
|
||||
tmpFile.delete();
|
||||
tmpFile.mkdir();
|
||||
tempDirs.add(tmpFile.getAbsolutePath());
|
||||
return newFSDirectoryImpl(clazz.asSubclass(FSDirectory.class), tmpFile, null);
|
||||
}
|
||||
|
||||
|
|
|
@@ -54,7 +54,9 @@ public class _TestUtil {
   /** Returns temp dir, containing String arg in its name;
    *  does not create the directory. */
   public static File getTempDir(String desc) {
-    return new File(LuceneTestCase.TEMP_DIR, desc + "." + new Random().nextLong());
+    File f = new File(LuceneTestCase.TEMP_DIR, desc + "." + new Random().nextLong());
+    LuceneTestCase.tempDirs.add(f.getAbsolutePath());
+    return f;
   }

   /**
@@ -89,7 +91,8 @@ public class _TestUtil {
     rmDir(destDir);

     destDir.mkdir();
+    LuceneTestCase.tempDirs.add(destDir.getAbsolutePath());

     while (entries.hasMoreElements()) {
       ZipEntry entry = entries.nextElement();
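A usage aside (my sketch, not from the diff): tests are expected to obtain scratch directories through _TestUtil.getTempDir so the path lands in LuceneTestCase.tempDirs and is removed by the afterClass cleanup added above (unless a test failed, in which case the directory is kept for debugging); the test class and name below are hypothetical.

    import java.io.File;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.SimpleFSDirectory;
    import org.apache.lucene.util.LuceneTestCase;
    import org.apache.lucene.util._TestUtil;

    public class TempDirUsageSketch extends LuceneTestCase {
      public void testUsesRegisteredTempDir() throws Exception {
        File scratch = _TestUtil.getTempDir("scratch");   // registered for cleanup
        scratch.mkdirs();
        Directory dir = new SimpleFSDirectory(scratch, null);
        try {
          // ... exercise the directory ...
        } finally {
          dir.close();
        }
      }
    }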
|
@ -1,5 +1,6 @@
|
|||
package org.apache.lucene.analysis;
|
||||
|
||||
import java.io.StringReader;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.apache.lucene.util.automaton.Automaton;
|
||||
|
@ -95,4 +96,19 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
|
|||
new String[] { "ok", "fine" },
|
||||
new int[] { 1, 2 });
|
||||
}
|
||||
|
||||
public void testLUCENE_3042() throws Exception {
|
||||
String testString = "t";
|
||||
|
||||
Analyzer analyzer = new MockAnalyzer(random);
|
||||
TokenStream stream = analyzer.reusableTokenStream("dummy", new StringReader(testString));
|
||||
stream.reset();
|
||||
while (stream.incrementToken()) {
|
||||
// consume
|
||||
}
|
||||
stream.end();
|
||||
|
||||
assertAnalyzesToReuse(analyzer, testString, new String[] { "t" });
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -57,8 +57,7 @@ public class TestCompoundFile extends LuceneTestCase
|
|||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
File file = new File(TEMP_DIR, "testIndex");
|
||||
_TestUtil.rmDir(file);
|
||||
File file = _TestUtil.getTempDir("testIndex");
|
||||
// use a simple FSDir here, to be sure to have SimpleFSInputs
|
||||
dir = new SimpleFSDirectory(file,null);
|
||||
}
|
||||
|
@ -66,7 +65,6 @@ public class TestCompoundFile extends LuceneTestCase
|
|||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
dir.close();
|
||||
_TestUtil.rmDir(new File(TEMP_DIR, "testIndex"));
|
||||
super.tearDown();
|
||||
}
|
||||
|
||||
|
|
|
@ -36,6 +36,7 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
|||
import org.apache.lucene.search.DocIdSetIterator;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util._TestUtil;
|
||||
import org.apache.lucene.index.codecs.CodecProvider;
|
||||
|
||||
|
||||
|
@ -60,10 +61,10 @@ public class TestDoc extends LuceneTestCase {
|
|||
if (VERBOSE) {
|
||||
System.out.println("TEST: setUp");
|
||||
}
|
||||
workDir = new File(TEMP_DIR,"TestDoc");
|
||||
workDir = _TestUtil.getTempDir("TestDoc");
|
||||
workDir.mkdirs();
|
||||
|
||||
indexDir = new File(workDir, "testIndex");
|
||||
indexDir = _TestUtil.getTempDir("testIndex");
|
||||
indexDir.mkdirs();
|
||||
|
||||
Directory directory = newFSDirectory(indexDir);
|
||||
|
|
|
@ -286,8 +286,7 @@ public class TestFieldsReader extends LuceneTestCase {
|
|||
*/
|
||||
public void testLazyPerformance() throws Exception {
|
||||
String userName = System.getProperty("user.name");
|
||||
File file = new File(TEMP_DIR, "lazyDir" + userName);
|
||||
_TestUtil.rmDir(file);
|
||||
File file = _TestUtil.getTempDir("lazyDir" + userName);
|
||||
Directory tmpDir = newFSDirectory(file);
|
||||
assertTrue(tmpDir != null);
|
||||
|
||||
|
@ -473,7 +472,7 @@ public class TestFieldsReader extends LuceneTestCase {
|
|||
|
||||
// LUCENE-1262
|
||||
public void testExceptions() throws Throwable {
|
||||
File indexDir = new File(TEMP_DIR, "testfieldswriterexceptions");
|
||||
File indexDir = _TestUtil.getTempDir("testfieldswriterexceptions");
|
||||
|
||||
try {
|
||||
Directory dir = new FaultyFSDirectory(indexDir);
|
||||
|
|
|
@ -1139,7 +1139,7 @@ public class TestIndexReader extends LuceneTestCase
|
|||
}
|
||||
|
||||
public void testOpenReaderAfterDelete() throws IOException {
|
||||
File dirFile = new File(TEMP_DIR, "deletetest");
|
||||
File dirFile = _TestUtil.getTempDir("deletetest");
|
||||
Directory dir = newFSDirectory(dirFile);
|
||||
try {
|
||||
IndexReader.open(dir, false);
|
||||
|
|
|
@ -1090,7 +1090,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
|
|||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
indexDir = new File(TEMP_DIR, "IndexReaderReopen");
|
||||
indexDir = _TestUtil.getTempDir("IndexReaderReopen");
|
||||
}
|
||||
|
||||
public void testCloseOrig() throws Throwable {
|
||||
|
|
|
@ -2756,7 +2756,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// Tests that if FSDir is opened w/ a NoLockFactory (or SingleInstanceLF),
|
||||
// then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
|
||||
// when listAll() was called in IndexFileDeleter.
|
||||
Directory dir = newFSDirectory(new File(TEMP_DIR, "emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
|
||||
Directory dir = newFSDirectory(_TestUtil.getTempDir("emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
|
||||
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
|
||||
dir.close();
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ import java.io.File;
|
|||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util._TestUtil;
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
||||
|
@ -39,7 +40,7 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
|
|||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
if (this.__test_dir == null) {
|
||||
this.__test_dir = new File(TEMP_DIR, "testIndexWriter");
|
||||
this.__test_dir = _TestUtil.getTempDir("testIndexWriter");
|
||||
|
||||
if (this.__test_dir.exists()) {
|
||||
throw new IOException("test directory \"" + this.__test_dir.getPath() + "\" already exists (please remove by hand)");
|
||||
|
|
|
@ -41,7 +41,7 @@ public class TestIndexWriterOnJRECrash extends TestNRTThreads {
|
|||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
tempDir = File.createTempFile("jrecrash", "tmp", TEMP_DIR);
|
||||
tempDir = _TestUtil.getTempDir("jrecrash");
|
||||
tempDir.delete();
|
||||
tempDir.mkdir();
|
||||
}
|
||||
|
|
|
@ -67,7 +67,7 @@ public class TestLongPostings extends LuceneTestCase {
|
|||
|
||||
// Don't use _TestUtil.getTempDir so that we own the
|
||||
// randomness (ie same seed will point to same dir):
|
||||
Directory dir = newFSDirectory(new File(LuceneTestCase.TEMP_DIR, "longpostings" + "." + random.nextLong()));
|
||||
Directory dir = newFSDirectory(_TestUtil.getTempDir("longpostings" + "." + random.nextLong()));
|
||||
|
||||
final int NUM_DOCS = (int) ((TEST_NIGHTLY ? 4e6 : (RANDOM_MULTIPLIER*2e4)) * (1+random.nextDouble()));
|
||||
|
||||
|
|
|
@ -240,7 +240,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testSetBufferSize() throws IOException {
|
||||
File indexDir = new File(TEMP_DIR, "testSetBufferSize");
|
||||
File indexDir = _TestUtil.getTempDir("testSetBufferSize");
|
||||
MockFSDirectory dir = new MockFSDirectory(indexDir, random);
|
||||
try {
|
||||
IndexWriter writer = new IndexWriter(
|
||||
|
|
|
@ -42,7 +42,7 @@ public class TestDirectory extends LuceneTestCase {
|
|||
// Test that different instances of FSDirectory can coexist on the same
|
||||
// path, can read, write, and lock files.
|
||||
public void testDirectInstantiation() throws Exception {
|
||||
File path = new File(TEMP_DIR, "testDirectInstantiation");
|
||||
File path = _TestUtil.getTempDir("testDirectInstantiation");
|
||||
|
||||
int sz = 3;
|
||||
Directory[] dirs = new Directory[sz];
|
||||
|
@ -134,7 +134,7 @@ public class TestDirectory extends LuceneTestCase {
|
|||
|
||||
// LUCENE-1468
|
||||
public void testFSDirectoryFilter() throws IOException {
|
||||
checkDirectoryFilter(newFSDirectory(new File(TEMP_DIR,"test")));
|
||||
checkDirectoryFilter(newFSDirectory(_TestUtil.getTempDir("test")));
|
||||
}
|
||||
|
||||
// LUCENE-1468
|
||||
|
@ -151,7 +151,7 @@ public class TestDirectory extends LuceneTestCase {
|
|||
|
||||
// LUCENE-1468
|
||||
public void testCopySubdir() throws Throwable {
|
||||
File path = new File(TEMP_DIR, "testsubdir");
|
||||
File path = _TestUtil.getTempDir("testsubdir");
|
||||
try {
|
||||
path.mkdirs();
|
||||
new File(path, "subdir").mkdirs();
|
||||
|
@ -164,7 +164,7 @@ public class TestDirectory extends LuceneTestCase {
|
|||
|
||||
// LUCENE-1468
|
||||
public void testNotDirectory() throws Throwable {
|
||||
File path = new File(TEMP_DIR, "testnotdir");
|
||||
File path = _TestUtil.getTempDir("testnotdir");
|
||||
Directory fsDir = new SimpleFSDirectory(path, null);
|
||||
try {
|
||||
IndexOutput out = fsDir.createOutput("afile");
|
||||
|
|
|
@ -41,7 +41,7 @@ public class TestMultiMMap extends LuceneTestCase {
|
|||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
workDir = new File(TEMP_DIR, "TestMultiMMap");
|
||||
workDir = _TestUtil.getTempDir("TestMultiMMap");
|
||||
workDir.mkdirs();
|
||||
}
|
||||
|
||||
|
|
|
@ -24,6 +24,7 @@ import java.io.ObjectOutputStream;
|
|||
import java.io.ByteArrayOutputStream;
|
||||
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util._TestUtil;
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
|
@ -49,7 +50,7 @@ public class TestRAMDirectory extends LuceneTestCase {
|
|||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
indexDir = new File(TEMP_DIR, "RAMDirIndex");
|
||||
indexDir = _TestUtil.getTempDir("RAMDirIndex");
|
||||
|
||||
Directory dir = newFSDirectory(indexDir);
|
||||
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
|
||||
|
|
|
@ -20,6 +20,7 @@ package org.apache.lucene.store;
|
|||
import java.io.File;
|
||||
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util._TestUtil;
|
||||
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.document.Document;
|
||||
|
@ -59,7 +60,7 @@ public class TestWindowsMMap extends LuceneTestCase {
|
|||
}
|
||||
|
||||
private final static String storePathname =
|
||||
new File(TEMP_DIR,"testLuceneMmap").getAbsolutePath();
|
||||
_TestUtil.getTempDir("testLuceneMmap").getAbsolutePath();
|
||||
|
||||
public void testMmapIndex() throws Exception {
|
||||
// sometimes the directory is not cleaned by rmDir, because on Windows it
|
||||
|
|
|
@ -147,4 +147,14 @@ public class TestAttributeSource extends LuceneTestCase {
|
|||
fail("Should throw IllegalArgumentException");
|
||||
} catch (IllegalArgumentException iae) {}
|
||||
}
|
||||
|
||||
public void testLUCENE_3042() throws Exception {
|
||||
final AttributeSource src1 = new AttributeSource();
|
||||
src1.addAttribute(CharTermAttribute.class).append("foo");
|
||||
int hash1 = src1.hashCode(); // this triggers a cached state
|
||||
final AttributeSource src2 = new AttributeSource(src1);
|
||||
src2.addAttribute(TypeAttribute.class).setType("bar");
|
||||
assertTrue("The hashCode is identical, so the captured state was preserved.", hash1 != src1.hashCode());
|
||||
assertEquals(src2.hashCode(), src1.hashCode());
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -33,7 +33,8 @@ public enum LicenseType {
   MPL("Mozilla Public License", false), //NOT SURE on the required notice
   PD("Public Domain", false),
   //SUNBCLA("Sun Binary Code License Agreement"),
-  SUN("Sun Open Source License", false)
+  SUN("Sun Open Source License", false),
+  FAKE("FAKE license - not needed", false)
   ;

   private String display;
|
@ -105,8 +105,10 @@ New Features
|
|||
levenshtein automata. (rmuir)
|
||||
|
||||
* SOLR-1873: SolrCloud - added shared/central config and core/shard managment via zookeeper,
|
||||
built-in load balancing, and infrastructure for future SolrCloud work.
|
||||
(yonik, Mark Miller)
|
||||
built-in load balancing, and infrastructure for future SolrCloud work. (yonik, Mark Miller)
|
||||
Additional Work:
|
||||
SOLR-2324: SolrCloud solr.xml parameters are not persisted by CoreContainer.
|
||||
(Massimo Schiavon, Mark Miller)
|
||||
|
||||
* SOLR-1729: Evaluation of NOW for date math is done only once per request for
|
||||
consistency, and is also propagated to shards in distributed search.
|
||||
|
@ -129,6 +131,14 @@ New Features
|
|||
* SOLR-2335: New 'field("...")' function syntax for refering to complex
|
||||
field names (containing whitespace or special characters) in functions.
|
||||
|
||||
* SOLR-1709: Distributed support for Date and Numeric Range Faceting
|
||||
(Peter Sturge, David Smiley, hossman)
|
||||
|
||||
* SOLR-2383: /browse improvements: generalize range and date facet display
|
||||
(Jan Høydahl via yonik)
|
||||
|
||||
|
||||
|
||||
Optimizations
|
||||
----------------------
|
||||
|
||||
|
@ -218,6 +228,8 @@ Other Changes
|
|||
and publish binary, javadoc, and source test-framework jars.
|
||||
(Drew Farris, Robert Muir, Steve Rowe)
|
||||
|
||||
* SOLR-2461: QuerySenderListener and AbstractSolrEventListener are
|
||||
now public (hossman)
|
||||
|
||||
Documentation
|
||||
----------------------
|
||||
|
@ -237,6 +249,10 @@ Carrot2 3.4.2
|
|||
|
||||
Upgrading from Solr 3.1
|
||||
----------------------
|
||||
|
||||
* The updateRequestProcessorChain for a RequestHandler is now defined
|
||||
with update.chain rather than update.processor. The latter still works,
|
||||
but has been deprecated.
|
||||
|
||||
Detailed Change List
|
||||
----------------------
|
||||
|
@@ -256,9 +272,42 @@ Bug Fixes
 * SOLR-2455: Prevent double submit of forms in admin interface.
   (Jeffrey Chang via uschindler)

+* SOLR-2464: Fix potential slowness in QueryValueSource (the query() function) when
+  the query is very sparse and may not match any documents in a segment. (yonik)
+
+* SOLR-2469: When using java replication with replicateAfter=startup, the first
+  commit point on server startup is never removed. (yonik)
+
+* SOLR-2466: SolrJ's CommonsHttpSolrServer would retry requests on failure, regardless
+  of the configured maxRetries, due to HttpClient having it's own retry mechanism
+  by default. The retryCount of HttpClient is now set to 0, and SolrJ does
+  the retry. (yonik)
+
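A usage aside on the SOLR-2466 note (my sketch, not part of the patch): with HttpClient's automatic retry no longer kicking in, the retry count a client sees is whatever it sets itself; the URL and count below are illustrative only.

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;

    public class SolrRetrySketch {
      public static void main(String[] args) throws Exception {
        CommonsHttpSolrServer server = new CommonsHttpSolrServer("http://localhost:8983/solr");
        server.setMaxRetries(1);   // retries now handled by SolrJ itself; 0 disables them
        server.query(new SolrQuery("*:*"));
      }
    }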
+* SOLR-2409: edismax parser - treat the text of a fielded query as a literal if the
+  fieldname does not exist. For example Mission: Impossible should not search on
+  the "Mission" field unless it's a valid field in the schema. (Ryan McKinley, yonik)
+
+* SOLR-2403: facet.sort=index reported incorrect results for distributed search
+  in a number of scenarios when facet.mincount>0. This patch also adds some
+  performance/algorithmic improvements when (facet.sort=count && facet.mincount=1
+  && facet.limit=-1) and when (facet.sort=index && facet.mincount>0) (yonik)
+
+* SOLR-2333: The "rename" core admin action does not persist the new name to solr.xml
+  (Rasmus Hahn, Paul R. Brown via Mark Miller)
+
+* SOLR-2390: Performance of usePhraseHighlighter is terrible on very large Documents,
+  regardless of hl.maxDocCharsToAnalyze. (Mark Miller)
+
+* SOLR-2474: The helper TokenStreams in analysis.jsp and AnalysisRequestHandlerBase
+  did not clear all attributes so they displayed incorrect attribute values for tokens
+  in later filter stages. (uschindler, rmuir, yonik)
+
 Other Changes
 ----------------------

+* SOLR-2105: Rename RequestHandler param 'update.processor' to 'update.chain'.
+  (Jan Høydahl via Mark Miller)
+
 Build
 ----------------------
|
@ -120,7 +120,7 @@
|
|||
<!-- Compile the project. -->
|
||||
<target name="compile"
|
||||
description="Compile the source code."
|
||||
depends="validate-solr, compile-solrj">
|
||||
depends="compile-solrj">
|
||||
|
||||
<solr-javac destdir="${dest}/solr"
|
||||
classpathref="compile.classpath.solrj">
|
||||
|
@ -394,7 +394,7 @@
|
|||
<!-- Run contrib unit tests. -->
|
||||
<target name="test"
|
||||
description="Runs the core unit tests."
|
||||
depends="test-core, test-contrib, test-jsp" />
|
||||
depends="validate-solr, test-core, test-contrib, test-jsp" />
|
||||
|
||||
<target name="junit" depends="compileTests,junit-mkdir,junit-sequential,junit-parallel"/>
|
||||
|
||||
|
|
|
@ -194,7 +194,7 @@ public class DataImportHandler extends RequestHandlerBase implements
|
|||
IMPORT_CMD.equals(command)) {
|
||||
|
||||
UpdateRequestProcessorChain processorChain =
|
||||
req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_PROCESSOR));
|
||||
req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_CHAIN));
|
||||
UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);
|
||||
SolrResourceLoader loader = req.getCore().getResourceLoader();
|
||||
SolrWriter sw = getSolrWriter(processor, loader, requestParams, req);
|
||||
|
|
|
@ -108,7 +108,7 @@ public class UIMAUpdateRequestProcessorTest extends SolrTestCaseJ4 {
|
|||
|
||||
private void addDoc(String doc) throws Exception {
|
||||
Map<String, String[]> params = new HashMap<String, String[]>();
|
||||
params.put(UpdateParams.UPDATE_PROCESSOR, new String[] { "uima" });
|
||||
params.put(UpdateParams.UPDATE_CHAIN, new String[] { "uima" });
|
||||
MultiMapSolrParams mmparams = new MultiMapSolrParams(params);
|
||||
SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), (SolrParams) mmparams) {
|
||||
};
|
||||
|
|
|
@ -855,7 +855,7 @@
|
|||
-->
|
||||
<requestHandler name="/update" class="solr.XmlUpdateRequestHandler">
|
||||
<lst name="defaults">
|
||||
<str name="update.processor">uima</str>
|
||||
<str name="update.chain">uima</str>
|
||||
</lst>
|
||||
</requestHandler>
|
||||
|
||||
|
@ -997,7 +997,7 @@
|
|||
anyway. You have to link the chain to an update handler above to use
|
||||
it ie: <requestHandler name="/update
|
||||
"class="solr.XmlUpdateRequestHandler"> <lst name="defaults"> <str
|
||||
name="update.processor">dedupe</str> </lst> </requestHandler>
|
||||
name="update.chain">dedupe</str> </lst> </requestHandler>
|
||||
-->
|
||||
|
||||
<updateRequestProcessorChain name="uima">
|
||||
|
|
|
@ -774,6 +774,7 @@
|
|||
<str name="q.alt">*:*</str>
|
||||
<str name="rows">10</str>
|
||||
<str name="fl">*,score</str>
|
||||
|
||||
<str name="mlt.qf">
|
||||
text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
|
||||
</str>
|
||||
|
@ -792,6 +793,8 @@
|
|||
<int name="facet.range.start">0</int>
|
||||
<int name="facet.range.end">600</int>
|
||||
<int name="facet.range.gap">50</int>
|
||||
<str name="facet.range">popularity</str>
|
||||
<int name="f.popularity.facet.range.gap">3</int>
|
||||
<str name="facet.range.other">after</str>
|
||||
<str name="facet.date">manufacturedate_dt</str>
|
||||
<str name="facet.date.start">NOW/YEAR-10YEARS</str>
|
||||
|
@ -841,7 +844,7 @@
|
|||
-->
|
||||
<!--
|
||||
<lst name="defaults">
|
||||
<str name="update.processor">dedupe</str>
|
||||
<str name="update.chain">dedupe</str>
|
||||
</lst>
|
||||
-->
|
||||
</requestHandler>
|
||||
|
|
|
@ -88,13 +88,13 @@
|
|||
<ul>
|
||||
#foreach ($facet in $field)
|
||||
#set($theDate = $date.toDate("yyyy-MM-dd'T'HH:mm:ss'Z'", $facet.key))
|
||||
#set($value = '["' + $facet.key + '" TO "' + $facet.key + $gap + '"]')
|
||||
#set($value = '["' + $facet.key + '" TO "' + $facet.key + $gap + '"}')
|
||||
|
||||
#set($facetURL = "#url_for_facet_date_filter($fieldName, $value)")
|
||||
#if ($facetURL != '')
|
||||
#if ($facet.key != "gap" && $facet.key != "start" && $facet.key != "end" && $facet.key != "before" && $facet.key != "after")
|
||||
|
||||
<li><a href="$facetURL">$date.format('MMM yyyy', $theDate)</a> ($facet.value)</li>
|
||||
<li><a href="$facetURL">$date.format('MMM yyyy', $theDate) $gap</a> ($facet.value)</li>
|
||||
#end
|
||||
#if ($facet.key == "before" && $facet.value > 0)
|
||||
<li><a href="$facetURL">Before</a> ($facet.value)</li>
|
||||
|
@ -113,20 +113,20 @@
|
|||
<span class="facet-field">$display</span>
|
||||
<ul>
|
||||
#if($before && $before != "")
|
||||
#set($value = "[* TO " + $start + "]")
|
||||
#set($value = "[* TO " + $start + "}")
|
||||
#set($facetURL = "#url_for_facet_range_filter($fieldName, $value)")
|
||||
<li><a href="$facetURL">Less than $start</a> ($before)</li>
|
||||
#end
|
||||
#foreach ($facet in $field)
|
||||
#set($rangeEnd = $math.add($facet.key, $gap))
|
||||
#set($value = "[" + $facet.key + " TO " + $rangeEnd + "]")
|
||||
#set($value = "[" + $facet.key + " TO " + $rangeEnd + "}")
|
||||
#set($facetURL = "#url_for_facet_range_filter($fieldName, $value)")
|
||||
#if ($facetURL != '')
|
||||
<li><a href="$facetURL">$facet.key</a> ($facet.value)</li>
|
||||
<li><a href="$facetURL">$facet.key - $rangeEnd</a> ($facet.value)</li>
|
||||
#end
|
||||
#end
|
||||
#if($end && $end != "")
|
||||
#set($value = "[" + $end + " TO *]")
|
||||
#if($end && $end != "" && $after > 0)
|
||||
#set($value = "[" + $end + " TO *}")
|
||||
#set($facetURL = "#url_for_facet_range_filter($fieldName, $value)")
|
||||
<li><a href="$facetURL">More than $math.toNumber($end)</a> ($after)</li>
|
||||
#end
|
||||
|
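An aside on the bracket change above (not part of the patch): switching the generated filter from "[... TO ...]" to "[... TO ...}" makes the upper bound exclusive, so clicking the 0-50 bucket produces a filter along the lines of price:[0 TO 50}, and a document with price exactly 50 is counted only in the next bucket instead of matching two adjacent ranges.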
@ -180,4 +180,8 @@
|
|||
$v
|
||||
#end
|
||||
#end
|
||||
#end
|
||||
#end
|
||||
|
||||
#macro(capitalize $name)
|
||||
${name.substring(0,1).toUpperCase()}${name.substring(1)}
|
||||
#end
|
|
@ -1,5 +1,9 @@
|
|||
##TODO: Generically deal with all dates
|
||||
<h2 #annTitle("Facets generated by adding &facet.date= to the request")>Date Facets</h2>
|
||||
#set($field = $response.response.facet_counts.facet_dates.manufacturedate_dt)
|
||||
#set($gap = $response.response.facet_counts.facet_dates.manufacturedate_dt.gap)
|
||||
#display_facet_date($field, "Manufacture Date", "manufacturedate_dt", $gap)
|
||||
#foreach ($field in $response.response.facet_counts.facet_dates)
|
||||
#set($name = $field.key)
|
||||
#set($display = "#capitalize($name)")
|
||||
#set($f = $field.value)
|
||||
#set($gap = $field.value.gap)
|
||||
#display_facet_date($f, $display, $name, $gap)
|
||||
#end
|
|
@ -1,10 +1,12 @@
|
|||
<h2 #annTitle("Facets generated by adding &facet.range= to the request")>Range Facets</h2>
|
||||
#set($field = $response.response.facet_counts.facet_ranges.price.counts)
|
||||
#set($start = $response.response.facet_counts.facet_ranges.price.start)
|
||||
#set($end = $response.response.facet_counts.facet_ranges.price.end)
|
||||
#set($gap = $response.response.facet_counts.facet_ranges.price.gap)
|
||||
#set($before = $response.response.facet_counts.facet_ranges.price.before)
|
||||
#set($after = $response.response.facet_counts.facet_ranges.price.after)
|
||||
##TODO: Make this display the "range", not just the lower value
|
||||
##TODO: Have a generic way to deal with ranges
|
||||
#display_facet_range($field, "Price (in $)", "price", $start, $end, $gap, $before, $after)
|
||||
#foreach ($field in $response.response.facet_counts.facet_ranges)
|
||||
#set($name = $field.key)
|
||||
#set($display = "#capitalize($name)")
|
||||
#set($f = $field.value.counts)
|
||||
#set($start = $field.value.start)
|
||||
#set($end = $field.value.end)
|
||||
#set($gap = $field.value.gap)
|
||||
#set($before = $field.value.before)
|
||||
#set($after = $field.value.after)
|
||||
#display_facet_range($f, $display, $name, $start, $end, $gap, $before, $after)
|
||||
#end
|
|
@ -18,6 +18,18 @@
|
|||
margin-left: 20px;
|
||||
}
|
||||
|
||||
.parsed_query_header {
|
||||
font-family: Helvetica, Arial, sans-serif;
|
||||
font-size: 10pt;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.parsed_query {
|
||||
font-family: Courier, Courier New, monospaced;
|
||||
font-size: 10pt;
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: Helvetica, Arial, sans-serif;
|
||||
font-size: 10pt;
|
||||
|
|
|
@ -27,14 +27,16 @@
|
|||
#end
|
||||
#end
|
||||
</div>
|
||||
<div class="parsed_query_header">
|
||||
#if($request.params.get('debugQuery'))
|
||||
<a href="#" onclick='jQuery(this).siblings("pre").toggle(); return false;'>toggle parsed query</a>
|
||||
<pre style="display:none">$response.response.debug.parsedquery</pre>
|
||||
<a href="#" onclick='jQuery(this).siblings("div").toggle(); return false;'>toggle parsed query</a>
|
||||
<div class="parsed_query" style="display:none">$response.response.debug.parsedquery</div>
|
||||
#end
|
||||
#set($queryOpts = $request.params.get("queryOpts"))
|
||||
#if($queryOpts && $queryOpts != "")
|
||||
<input type="hidden" name="queryOpts" value="$queryOpts"/>
|
||||
#end
|
||||
</div>
|
||||
</form>
|
||||
|
||||
</div>
|
||||
|
|
|
@@ -61,7 +61,7 @@ public class SolrDocumentList extends ArrayList<SolrDocument>
   public String toString() {
     return "{numFound="+numFound
           +",start="+start
-          + (maxScore!=null ? ""+maxScore : "")
+          + (maxScore!=null ? ",maxScore="+maxScore : "")
           +",docs="+super.toString()
           +"}";
   }
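A quick illustration of the fix (values invented): with maxScore set, the old string concatenated the score directly onto start with no label or separator, while the new one labels it:

    before: {numFound=2,start=01.7,docs=[...]}
    after:  {numFound=2,start=0,maxScore=1.7,docs=[...]}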
@@ -43,8 +43,11 @@ public interface UpdateParams
   /** Rollback update commands */
   public static String ROLLBACK = "rollback";

-  /** Select the update processor to use. A RequestHandler may or may not respect this parameter */
-  public static final String UPDATE_PROCESSOR = "update.processor";
+  /** Select the update processor chain to use. A RequestHandler may or may not respect this parameter */
+  public static final String UPDATE_CHAIN = "update.chain";
+  // TODO: Include the old deprecated param, for removal in Solr 4.0
+  public static final String UPDATE_CHAIN_DEPRECATED = "update.processor";

   /**
    * If optimizing, set the maximum number of segments left in the index after optimization. 1 is the default (and is equivalent to calling IndexWriter.optimize() in Lucene).
    */
|
@ -23,7 +23,7 @@ import org.apache.solr.search.SolrIndexSearcher;
|
|||
|
||||
/**
|
||||
*/
|
||||
class AbstractSolrEventListener implements SolrEventListener {
|
||||
public class AbstractSolrEventListener implements SolrEventListener {
|
||||
protected final SolrCore core;
|
||||
public AbstractSolrEventListener(SolrCore core) {
|
||||
this.core = core;
|
||||
|
|
|
@ -79,6 +79,7 @@ public class CoreContainer
|
|||
protected Map<String ,IndexSchema> indexSchemaCache;
|
||||
protected String adminHandler;
|
||||
protected boolean shareSchema;
|
||||
protected Integer zkClientTimeout;
|
||||
protected String solrHome;
|
||||
protected String defaultCoreName = "";
|
||||
private ZkController zkController;
|
||||
|
@ -313,7 +314,7 @@ public class CoreContainer
|
|||
zkHost = cfg.get("solr/@zkHost" , null);
|
||||
adminPath = cfg.get("solr/cores/@adminPath", null);
|
||||
shareSchema = cfg.getBool("solr/cores/@shareSchema", false);
|
||||
int zkClientTimeout = cfg.getInt("solr/cores/@zkClientTimeout", 10000);
|
||||
zkClientTimeout = cfg.getInt("solr/cores/@zkClientTimeout", 10000);
|
||||
|
||||
hostPort = System.getProperty("hostPort");
|
||||
if (hostPort == null) {
|
||||
|
@ -500,7 +501,12 @@ public class CoreContainer
|
|||
SolrCore old = null;
|
||||
synchronized (cores) {
|
||||
old = cores.put(name, core);
|
||||
/*
|
||||
* set both the name of the descriptor and the name of the
|
||||
* core, since the descriptors name is used for persisting.
|
||||
*/
|
||||
core.setName(name);
|
||||
core.getCoreDescriptor().name = name;
|
||||
}
|
||||
|
||||
if (zkController != null) {
|
||||
|
@ -884,6 +890,7 @@ public class CoreContainer
|
|||
if (this.libDir != null) {
|
||||
writeAttribute(w,"sharedLib",libDir);
|
||||
}
|
||||
if(zkHost != null) writeAttribute(w, "zkHost", zkHost);
|
||||
writeAttribute(w,"persistent",isPersistent());
|
||||
w.write(">\n");
|
||||
|
||||
|
@ -892,9 +899,13 @@ public class CoreContainer
|
|||
}
|
||||
w.write(" <cores");
|
||||
writeAttribute(w, "adminPath",adminPath);
|
||||
if(adminHandler != null) writeAttribute(w, "adminHandler",adminHandler);
|
||||
if(shareSchema) writeAttribute(w, "shareSchema","true");
|
||||
if(!defaultCoreName.equals("")) writeAttribute(w, "defaultCoreName",defaultCoreName);
|
||||
if(adminHandler != null) writeAttribute(w, "adminHandler", adminHandler);
|
||||
if(shareSchema) writeAttribute(w, "shareSchema", "true");
|
||||
if(!defaultCoreName.equals("")) writeAttribute(w, "defaultCoreName", defaultCoreName);
|
||||
if(host != null) writeAttribute(w, "host", host);
|
||||
if(hostPort != null) writeAttribute(w, "hostPort", hostPort);
|
||||
if(zkClientTimeout != null) writeAttribute(w, "zkClientTimeout", zkClientTimeout);
|
||||
if(hostContext != null) writeAttribute(w, "hostContext", hostContext);
|
||||
w.write(">\n");
|
||||
|
||||
synchronized(cores) {
|
||||
|
|
|
@ -32,7 +32,7 @@ import java.util.List;
|
|||
/**
|
||||
* @version $Id$
|
||||
*/
|
||||
class QuerySenderListener extends AbstractSolrEventListener {
|
||||
public class QuerySenderListener extends AbstractSolrEventListener {
|
||||
public QuerySenderListener(SolrCore core) {
|
||||
super(core);
|
||||
}
|
||||
|
|
|
@ -337,6 +337,7 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {
|
|||
@Override
|
||||
public boolean incrementToken() throws IOException {
|
||||
if (tokenIterator.hasNext()) {
|
||||
clearAttributes();
|
||||
AttributeSource next = tokenIterator.next();
|
||||
Iterator<Class<? extends Attribute>> atts = next.getAttributeClassesIterator();
|
||||
while (atts.hasNext()) // make sure all att impls in the token exist here
|
||||
|
|
|
@ -24,6 +24,8 @@ import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**

@ -31,12 +33,22 @@ import org.apache.solr.update.processor.UpdateRequestProcessorChain;
*
**/
public abstract class ContentStreamHandlerBase extends RequestHandlerBase {
public static Logger log = LoggerFactory.getLogger(XmlUpdateRequestHandler.class);

@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
SolrParams params = req.getParams();
String updateChainName = null;
if(params.get(UpdateParams.UPDATE_CHAIN_DEPRECATED) != null) {
log.warn("Use of deprecated update request parameter "+UpdateParams.UPDATE_CHAIN_DEPRECATED+
" detected. Please use the new parameter "+UpdateParams.UPDATE_CHAIN+" instead, as support"+
" for "+UpdateParams.UPDATE_CHAIN_DEPRECATED+" will be removed in a later version.");
updateChainName = params.get(UpdateParams.UPDATE_CHAIN_DEPRECATED);
} else {
updateChainName = params.get(UpdateParams.UPDATE_CHAIN);
}
UpdateRequestProcessorChain processorChain =
req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_PROCESSOR));
req.getCore().getUpdateProcessingChain(updateChainName);

UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);
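Note (illustrative, not part of the commit): on the client side, the handler change above means an update chain is selected with the current UpdateParams.UPDATE_CHAIN parameter rather than the deprecated UpdateParams.UPDATE_CHAIN_DEPRECATED name. The helper below is a hypothetical sketch of that usage; the class and method names are invented.

import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.UpdateParams;

// Hypothetical client-side sketch: build request params that pick an update
// chain by the current parameter name; the commented line shows the
// deprecated spelling that the handler above now warns about.
final class UpdateChainParamSketch {
  static ModifiableSolrParams forChain(String chainName) {
    ModifiableSolrParams p = new ModifiableSolrParams();
    p.set(UpdateParams.UPDATE_CHAIN, chainName);
    // p.set(UpdateParams.UPDATE_CHAIN_DEPRECATED, chainName); // triggers the warning above
    return p;
  }
}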
@ -843,9 +843,13 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
indexCommitPoint = reader.getIndexCommit();
}
} finally {
// We don't need to save commit points for replication, the SolrDeletionPolicy
// always saves the last commit point (and the last optimized commit point, if needed)
/***
if(indexCommitPoint != null){
core.getDeletionPolicy().saveCommitPoint(indexCommitPoint.getVersion());
}
***/
}
}
if (core.getUpdateHandler() instanceof DirectUpdateHandler2) {
@ -190,7 +190,7 @@ public class CoreAdminHandler extends RequestHandlerBase {
}

UpdateRequestProcessorChain processorChain =
core.getUpdateProcessingChain(params.get(UpdateParams.UPDATE_PROCESSOR));
core.getUpdateProcessingChain(params.get(UpdateParams.UPDATE_CHAIN));
wrappedReq = new LocalSolrQueryRequest(core, req.getParams());
UpdateRequestProcessor processor =
processorChain.createProcessor(wrappedReq, rsp);
@ -17,23 +17,23 @@

package org.apache.solr.handler.component;

import java.io.IOException;
import java.net.URL;
import java.util.*;

import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.util.OpenBitSet;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.request.SimpleFacets;
import org.apache.lucene.util.OpenBitSet;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.schema.FieldType;
import org.apache.lucene.queryParser.ParseException;
import org.apache.solr.search.QueryParsing;

import java.io.IOException;
import java.net.URL;
import java.util.*;

/**
* TODO!
@ -222,11 +222,37 @@ public class FacetComponent extends SearchComponent
sreq.params.remove(paramStart + FacetParams.FACET_MINCOUNT);
sreq.params.remove(paramStart + FacetParams.FACET_OFFSET);

dff.initialLimit = dff.offset + dff.limit;
dff.initialLimit = dff.limit <= 0 ? dff.limit : dff.offset + dff.limit;

if(dff.sort.equals(FacetParams.FACET_SORT_COUNT) && dff.limit > 0) {
// set the initial limit higher to increase accuracy
dff.initialLimit = (int)(dff.initialLimit * 1.5) + 10;
if (dff.sort.equals(FacetParams.FACET_SORT_COUNT)) {
if (dff.limit > 0) {
// set the initial limit higher to increase accuracy
dff.initialLimit = (int)(dff.initialLimit * 1.5) + 10;
dff.initialMincount = 0; // TODO: we could change this to 1, but would then need more refinement for small facet result sets?
} else {
// if limit==-1, then no need to artificially lower mincount to 0 if it's 1
dff.initialMincount = Math.min(dff.minCount, 1);
}
} else {
// we're sorting by index order.
// if minCount==0, we should always be able to get accurate results w/o over-requesting or refining
// if minCount==1, we should be able to get accurate results w/o over-requesting, but we'll need to refine
// if minCount==n (>1), we can set the initialMincount to minCount/nShards, rounded up.
// For example, we know that if minCount=10 and we have 3 shards, then at least one shard must have a count of 4 for the term
// For the minCount>1 case, we can generate too short of a list (miss terms at the end of the list) unless limit==-1
// For example: each shard could produce a list of top 10, but some of those could fail to make it into the combined list (i.e.
// we needed to go beyond the top 10 to generate the top 10 combined). Overrequesting can help a little here, but not as
// much as when sorting by count.
if (dff.minCount <= 1) {
dff.initialMincount = dff.minCount;
} else {
dff.initialMincount = (int)Math.ceil((double)dff.minCount / rb.slices.length);
// dff.initialMincount = 1;
}
}

if (dff.initialMincount != 0) {
sreq.params.set(paramStart + FacetParams.FACET_MINCOUNT, dff.initialMincount);
}

// Currently this is for testing only and allows overriding of the
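Note (illustrative, not part of the commit): the per-shard mincount reasoning in the comments above can be checked with a tiny standalone sketch; the class and method names here are made up for illustration.

// Standalone sketch of the index-order mincount math described above:
// with facet.mincount=10 spread over 3 shards, a qualifying term must
// reach ceil(10/3) = 4 on at least one shard, so 4 is the lowest safe
// per-shard mincount to request.
public class ShardMincountSketch {
  static int initialMincount(int minCount, int nShards) {
    if (minCount <= 1) return minCount; // 0 and 1 need no scaling
    return (int) Math.ceil((double) minCount / nShards);
  }

  public static void main(String[] args) {
    System.out.println(initialMincount(10, 3)); // 4
    System.out.println(initialMincount(1, 3));  // 1
  }
}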
@ -286,8 +312,95 @@ public class FacetComponent extends SearchComponent
|
|||
dff.add(shardNum, (NamedList)facet_fields.get(dff.getKey()), dff.initialLimit);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Distributed facet_dates
|
||||
//
|
||||
// The implementation below uses the first encountered shard's
|
||||
// facet_dates as the basis for subsequent shards' data to be merged.
|
||||
// (the "NOW" param should ensure consistency)
|
||||
@SuppressWarnings("unchecked")
|
||||
SimpleOrderedMap<SimpleOrderedMap<Object>> facet_dates =
|
||||
(SimpleOrderedMap<SimpleOrderedMap<Object>>)
|
||||
facet_counts.get("facet_dates");
|
||||
|
||||
if (facet_dates != null) {
|
||||
|
||||
// go through each facet_date
|
||||
for (Map.Entry<String,SimpleOrderedMap<Object>> entry : facet_dates) {
|
||||
final String field = entry.getKey();
|
||||
if (fi.dateFacets.get(field) == null) {
|
||||
// first time we've seen this field, no merging
|
||||
fi.dateFacets.add(field, entry.getValue());
|
||||
|
||||
} else {
|
||||
// not the first time, merge current field
|
||||
|
||||
SimpleOrderedMap<Object> shardFieldValues
|
||||
= entry.getValue();
|
||||
SimpleOrderedMap<Object> existFieldValues
|
||||
= fi.dateFacets.get(field);
|
||||
|
||||
for (Map.Entry<String,Object> existPair : existFieldValues) {
|
||||
final String key = existPair.getKey();
|
||||
if (key.equals("gap") ||
|
||||
key.equals("end") ||
|
||||
key.equals("start")) {
|
||||
// we can skip these, must all be the same across shards
|
||||
continue;
|
||||
}
|
||||
// can be null if inconsistencies in shards responses
|
||||
Integer newValue = (Integer) shardFieldValues.get(key);
|
||||
if (null != newValue) {
|
||||
Integer oldValue = ((Integer) existPair.getValue());
|
||||
existPair.setValue(oldValue + newValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Distributed facet_ranges
|
||||
//
|
||||
// The implementation below uses the first encountered shard's
|
||||
// facet_ranges as the basis for subsequent shards' data to be merged.
|
||||
@SuppressWarnings("unchecked")
|
||||
SimpleOrderedMap<SimpleOrderedMap<Object>> facet_ranges =
|
||||
(SimpleOrderedMap<SimpleOrderedMap<Object>>)
|
||||
facet_counts.get("facet_ranges");
|
||||
|
||||
if (facet_ranges != null) {
|
||||
|
||||
// go through each facet_range
|
||||
for (Map.Entry<String,SimpleOrderedMap<Object>> entry : facet_ranges) {
|
||||
final String field = entry.getKey();
|
||||
if (fi.rangeFacets.get(field) == null) {
|
||||
// first time we've seen this field, no merging
|
||||
fi.rangeFacets.add(field, entry.getValue());
|
||||
|
||||
} else {
|
||||
// not the first time, merge current field counts
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
NamedList<Integer> shardFieldValues
|
||||
= (NamedList<Integer>) entry.getValue().get("counts");
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
NamedList<Integer> existFieldValues
|
||||
= (NamedList<Integer>) fi.rangeFacets.get(field).get("counts");
|
||||
|
||||
for (Map.Entry<String,Integer> existPair : existFieldValues) {
|
||||
final String key = existPair.getKey();
|
||||
// can be null if inconsistencies in shards responses
|
||||
Integer newValue = shardFieldValues.get(key);
|
||||
if (null != newValue) {
|
||||
Integer oldValue = existPair.getValue();
|
||||
existPair.setValue(oldValue + newValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// This code currently assumes that there will be only a single
|
||||
|
@ -296,15 +409,18 @@ public class FacetComponent extends SearchComponent
//

for (DistribFieldFacet dff : fi.facets.values()) {
if (dff.limit <= 0) continue; // no need to check these facets for refinement
if (dff.minCount <= 1 && dff.sort.equals(FacetParams.FACET_SORT_INDEX)) continue;
// no need to check these facets for refinement
if (dff.initialLimit <= 0 && dff.initialMincount == 0) continue;

@SuppressWarnings("unchecked") // generic array's are anoying
// only other case where index-sort doesn't need refinement is if minCount==0
if (dff.minCount == 0 && dff.sort.equals(FacetParams.FACET_SORT_INDEX)) continue;

@SuppressWarnings("unchecked") // generic array's are annoying
List<String>[] tmp = (List<String>[]) new List[rb.shards.length];
dff._toRefine = tmp;

ShardFacetCount[] counts = dff.getCountSorted();
int ntop = Math.min(counts.length, dff.offset + dff.limit);
int ntop = Math.min(counts.length, dff.limit >= 0 ? dff.offset + dff.limit : Integer.MAX_VALUE);
long smallestCount = counts.length == 0 ? 0 : counts[ntop-1].count;

for (int i=0; i<counts.length; i++) {
|
@ -313,8 +429,11 @@ public class FacetComponent extends SearchComponent
|
|||
|
||||
if (i<ntop) {
|
||||
// automatically flag the top values for refinement
|
||||
// this should always be true for facet.sort=index
|
||||
needRefinement = true;
|
||||
} else {
|
||||
// this logic should only be invoked for facet.sort=index (for now)
|
||||
|
||||
// calculate the maximum value that this term may have
|
||||
// and if it is >= smallestCount, then flag for refinement
|
||||
long maxCount = sfc.count;
|
||||
|
@ -422,13 +541,32 @@ public class FacetComponent extends SearchComponent
|
|||
counts = dff.getLexSorted();
|
||||
}
|
||||
|
||||
int end = dff.limit < 0 ? counts.length : Math.min(dff.offset + dff.limit, counts.length);
|
||||
for (int i=dff.offset; i<end; i++) {
|
||||
if (counts[i].count < dff.minCount) {
|
||||
if (countSorted) break; // if sorted by count, we can break out of loop early
|
||||
else continue;
|
||||
if (countSorted) {
|
||||
int end = dff.limit < 0 ? counts.length : Math.min(dff.offset + dff.limit, counts.length);
|
||||
for (int i=dff.offset; i<end; i++) {
|
||||
if (counts[i].count < dff.minCount) {
|
||||
break;
|
||||
}
|
||||
fieldCounts.add(counts[i].name, num(counts[i].count));
|
||||
}
|
||||
} else {
|
||||
int off = dff.offset;
|
||||
int lim = dff.limit >= 0 ? dff.limit : Integer.MAX_VALUE;
|
||||
|
||||
// index order...
|
||||
for (int i=0; i<counts.length; i++) {
|
||||
long count = counts[i].count;
|
||||
if (count < dff.minCount) continue;
|
||||
if (off > 0) {
|
||||
off--;
|
||||
continue;
|
||||
}
|
||||
if (lim <= 0) {
|
||||
break;
|
||||
}
|
||||
lim--;
|
||||
fieldCounts.add(counts[i].name, num(count));
|
||||
}
|
||||
fieldCounts.add(counts[i].name, num(counts[i].count));
|
||||
}
|
||||
|
||||
if (dff.missing) {
|
||||
|
@ -436,9 +574,8 @@ public class FacetComponent extends SearchComponent
|
|||
}
|
||||
}
|
||||
|
||||
// TODO: facet dates & numbers
|
||||
facet_counts.add("facet_dates", new SimpleOrderedMap());
|
||||
facet_counts.add("facet_ranges", new SimpleOrderedMap());
|
||||
facet_counts.add("facet_dates", fi.dateFacets);
|
||||
facet_counts.add("facet_ranges", fi.rangeFacets);
|
||||
|
||||
rb.rsp.add("facet_counts", facet_counts);
|
||||
|
||||
|
@ -490,8 +627,14 @@ public class FacetComponent extends SearchComponent
|
|||
* <b>This API is experimental and subject to change</b>
|
||||
*/
|
||||
public static class FacetInfo {
|
||||
|
||||
public LinkedHashMap<String,QueryFacet> queryFacets;
|
||||
public LinkedHashMap<String,DistribFieldFacet> facets;
|
||||
public SimpleOrderedMap<SimpleOrderedMap<Object>> dateFacets
|
||||
= new SimpleOrderedMap<SimpleOrderedMap<Object>>();
|
||||
public SimpleOrderedMap<SimpleOrderedMap<Object>> rangeFacets
|
||||
= new SimpleOrderedMap<SimpleOrderedMap<Object>>();
|
||||
|
||||
public List<String> exceptionList;
|
||||
|
||||
void parse(SolrParams params, ResponseBuilder rb) {
|
||||
|
@ -631,7 +774,8 @@ public class FacetComponent extends SearchComponent
|
|||
public HashMap<String,ShardFacetCount> counts = new HashMap<String,ShardFacetCount>(128);
|
||||
public int termNum;
|
||||
|
||||
public int initialLimit; // how many terms requested in first phase
|
||||
public int initialLimit; // how many terms requested in first phase
|
||||
public int initialMincount; // mincount param sent to each shard
|
||||
public boolean needRefinements;
|
||||
public ShardFacetCount[] countSorted;
|
||||
|
||||
|
@ -671,11 +815,10 @@ public class FacetComponent extends SearchComponent
|
|||
}
|
||||
}
|
||||
|
||||
// the largest possible missing term is 0 if we received less
|
||||
// than the number requested (provided mincount==0 like it should be for
|
||||
// a shard request)
|
||||
// the largest possible missing term is initialMincount if we received less
|
||||
// than the number requested.
|
||||
if (numRequested<0 || numRequested != 0 && numReceived < numRequested) {
|
||||
last = 0;
|
||||
last = initialMincount;
|
||||
}
|
||||
|
||||
missingMaxPossible += last;
|
||||
|
|
|
@ -435,12 +435,20 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
// fall back to analyzer
tstream = createAnalyzerTStream(schema, fieldName, docTexts[j]);
}

int maxCharsToAnalyze = params.getFieldInt(fieldName,
HighlightParams.MAX_CHARS,
Highlighter.DEFAULT_MAX_CHARS_TO_ANALYZE);

Highlighter highlighter;
if (Boolean.valueOf(req.getParams().get(HighlightParams.USE_PHRASE_HIGHLIGHTER, "true"))) {
// TODO: this is not always necessary - eventually we would like to avoid this wrap
// when it is not needed.
tstream = new CachingTokenFilter(tstream);
if (maxCharsToAnalyze < 0) {
tstream = new CachingTokenFilter(tstream);
} else {
tstream = new CachingTokenFilter(new OffsetLimitTokenFilter(tstream, maxCharsToAnalyze));
}

// get highlighter
highlighter = getPhraseHighlighter(query, fieldName, req, (CachingTokenFilter) tstream);

@ -453,9 +461,6 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
highlighter = getHighlighter(query, fieldName, req);
}

int maxCharsToAnalyze = params.getFieldInt(fieldName,
HighlightParams.MAX_CHARS,
Highlighter.DEFAULT_MAX_CHARS_TO_ANALYZE);
if (maxCharsToAnalyze < 0) {
highlighter.setMaxDocCharsToAnalyze(docTexts[j].length());
} else {
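Note (illustrative, not part of the commit): the wrapping decision in the first highlighter hunk above boils down to the small helper sketched below; the class and method names are invented for illustration.

import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.search.highlight.OffsetLimitTokenFilter;

// Sketch: cache the stream for the phrase highlighter, and cap the analyzed
// offsets only when a non-negative hl.maxAnalyzedChars limit was supplied.
final class OffsetCapSketch {
  static CachingTokenFilter capAndCache(TokenStream ts, int maxCharsToAnalyze) {
    return maxCharsToAnalyze < 0
        ? new CachingTokenFilter(ts)
        : new CachingTokenFilter(new OffsetLimitTokenFilter(ts, maxCharsToAnalyze));
  }
}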
@ -316,7 +316,8 @@ class JSONWriter extends TextResponseWriter {
if( idx > 0 ) {
writeArraySeparator();
}

indent();
writeMapOpener(doc.size());
incLevel();
@ -90,9 +90,10 @@ public class ValueSourceAugmenter extends DocTransformer
}

int localId = docid - rcontext.docBase;
float val = values.floatVal(localId); // TODO: handle all types -- see: SOLR-2443

doc.setField( name, val );
Object val = values.objectVal(localId);
if (val != null) {
doc.setField( name, val );
}
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "exception at docid " + docid + " for valuesource " + valueSource, e, false);
}
@ -486,6 +486,17 @@ class DateFieldSource extends FieldCacheSource {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
int ord=termsIndex.getOrd(doc);
|
||||
if (ord == 0) {
|
||||
return null;
|
||||
} else {
|
||||
BytesRef br = termsIndex.lookup(ord, new BytesRef());
|
||||
return ft.toObject(null, br);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + intVal(doc);
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.apache.lucene.util.ReaderUtil;
|
|||
import org.apache.solr.response.TextResponseWriter;
|
||||
import org.apache.solr.search.QParser;
|
||||
import org.apache.solr.search.function.DocValues;
|
||||
import org.apache.solr.search.function.IntDocValues;
|
||||
import org.apache.solr.search.function.ValueSource;
|
||||
|
||||
/**
|
||||
|
@ -157,37 +158,12 @@ public class RandomSortField extends FieldType {
|
|||
|
||||
@Override
|
||||
public DocValues getValues(Map context, final AtomicReaderContext readerContext) throws IOException {
|
||||
return new DocValues() {
|
||||
return new IntDocValues(this) {
|
||||
private final int seed = getSeed(field, readerContext);
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)hash(doc+seed);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return hash(doc+seed);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)hash(doc+seed);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)hash(doc+seed);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Integer.toString(hash(doc+seed));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + intVal(doc);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -148,6 +148,12 @@ class SortableDoubleFieldSource extends FieldCacheSource {
|
|||
return Double.toString(doubleVal(doc));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
int ord=termsIndex.getOrd(doc);
|
||||
return ord==0 ? null : NumberUtils.SortableStr2double(termsIndex.lookup(ord, spare));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + doubleVal(doc);
|
||||
|
|
|
@ -153,6 +153,12 @@ class SortableFloatFieldSource extends FieldCacheSource {
|
|||
return description() + '=' + floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
int ord=termsIndex.getOrd(doc);
|
||||
return ord==0 ? null : NumberUtils.SortableStr2float(termsIndex.lookup(ord, spare));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueFiller getValueFiller() {
|
||||
return new ValueFiller() {
|
||||
|
|
|
@ -155,6 +155,12 @@ class SortableIntFieldSource extends FieldCacheSource {
|
|||
return description() + '=' + intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
int ord=termsIndex.getOrd(doc);
|
||||
return ord==0 ? null : NumberUtils.SortableStr2int(termsIndex.lookup(ord, spare));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueFiller getValueFiller() {
|
||||
return new ValueFiller() {
|
||||
|
|
|
@ -149,6 +149,12 @@ class SortableLongFieldSource extends FieldCacheSource {
|
|||
return Long.toString(longVal(doc));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
int ord=termsIndex.getOrd(doc);
|
||||
return ord==0 ? null : NumberUtils.SortableStr2long(termsIndex.lookup(ord, spare));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + longVal(doc);
|
||||
|
|
|
@ -19,9 +19,11 @@ package org.apache.solr.schema;
|
|||
|
||||
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.noggit.CharArr;
|
||||
import org.apache.solr.search.function.DocValues;
|
||||
import org.apache.solr.search.function.FieldCacheSource;
|
||||
import org.apache.solr.search.function.StringIndexDocValues;
|
||||
import org.apache.solr.util.ByteUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
@ -40,32 +42,12 @@ public class StrFieldSource extends FieldCacheSource {
|
|||
@Override
|
||||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
return new StringIndexDocValues(this, readerContext, field) {
|
||||
|
||||
@Override
|
||||
protected String toTerm(String readableValue) {
|
||||
return readableValue;
|
||||
}
|
||||
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
int ord=termsIndex.getOrd(doc);
|
||||
return ord;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int ordVal(int doc) {
|
||||
return termsIndex.getOrd(doc);
|
||||
|
@ -77,13 +59,8 @@ public class StrFieldSource extends FieldCacheSource {
|
|||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
int ord=termsIndex.getOrd(doc);
|
||||
if (ord == 0) {
|
||||
return null;
|
||||
} else {
|
||||
return termsIndex.lookup(ord, new BytesRef()).utf8ToString();
|
||||
}
|
||||
public Object objectVal(int doc) {
|
||||
return strVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -538,6 +538,11 @@ class TrieDateFieldSource extends LongFieldSource {
|
|||
return new MutableValueDate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object longToObject(long val) {
|
||||
return new Date(val);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long externalToLong(String extVal) {
|
||||
return TrieField.dateField.parseMath(null, extVal).getTime();
|
||||
|
|
|
@ -238,6 +238,7 @@ class ExtendedDismaxQParser extends QParser {
|
|||
|
||||
try {
|
||||
up.setRemoveStopFilter(!stopwords);
|
||||
up.exceptions = true;
|
||||
parsedUserQuery = up.parse(mainUserQuery);
|
||||
|
||||
if (stopwords && isEmpty(parsedUserQuery)) {
|
||||
|
@ -247,6 +248,7 @@ class ExtendedDismaxQParser extends QParser {
|
|||
}
|
||||
} catch (Exception e) {
|
||||
// ignore failure and reparse later after escaping reserved chars
|
||||
up.exceptions = false;
|
||||
}
|
||||
|
||||
if (parsedUserQuery != null && doMinMatched) {
|
||||
|
@ -785,12 +787,19 @@ class ExtendedDismaxQParser extends QParser {
|
|||
RANGE
|
||||
}
|
||||
|
||||
|
||||
static final RuntimeException unknownField = new RuntimeException("UnknownField");
|
||||
static {
|
||||
unknownField.fillInStackTrace();
|
||||
}
|
||||
|
||||
/**
|
||||
* A subclass of SolrQueryParser that supports aliasing fields for
|
||||
* constructing DisjunctionMaxQueries.
|
||||
*/
|
||||
class ExtendedSolrQueryParser extends SolrQueryParser {
|
||||
|
||||
|
||||
/** A simple container for storing alias info
|
||||
*/
|
||||
protected class Alias {
|
||||
|
@ -803,6 +812,7 @@ class ExtendedDismaxQParser extends QParser {
|
|||
boolean allowWildcard=true;
|
||||
int minClauseSize = 0; // minimum number of clauses per phrase query...
|
||||
// used when constructing boosting part of query via sloppy phrases
|
||||
boolean exceptions; // allow exceptions to be thrown (for example on a missing field)
|
||||
|
||||
ExtendedAnalyzer analyzer;
|
||||
|
||||
|
@ -982,6 +992,15 @@ class ExtendedDismaxQParser extends QParser {
|
|||
return q;
|
||||
}
|
||||
} else {
|
||||
|
||||
// verify that a fielded query is actually on a field that exists... if not,
|
||||
// then throw an exception to get us out of here, and we'll treat it like a
|
||||
// literal when we try the escape+re-parse.
|
||||
if (exceptions) {
|
||||
FieldType ft = schema.getFieldTypeNoEx(field);
|
||||
if (ft == null) throw unknownField;
|
||||
}
|
||||
|
||||
return getQuery();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -306,7 +306,7 @@ public class QueryParsing {
Boolean top = sp.getSortDirection();
if (null == top) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Can't determine Sort Order: " + sp);
"Can't determine a Sort Order (asc or desc) in sort spec " + sp);
}

if (SCORE.equals(field)) {

@ -638,7 +638,11 @@ public class QueryParsing
}

String v = val.substring(start,pos);
return flt ? Double.parseDouble(v) : Long.parseLong(v);
if (flt) {
return Double.parseDouble(v);
} else {
return Long.parseLong(v);
}
}

double getDouble() throws ParseException {
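Note (illustrative, not part of the commit): splitting the ternary above into an if/else is most plausibly about Java's conditional-operator numeric promotion, which would otherwise widen the long branch to double before boxing. A standalone demonstration (names invented):

public class TernaryPromotionSketch {
  public static void main(String[] args) {
    boolean flt = false;
    // The conditional operator promotes the long branch to double,
    // so the boxed result is always a Double here...
    Object promoted = flt ? Double.parseDouble("1.5") : Long.parseLong("7");
    System.out.println(promoted.getClass().getSimpleName()); // Double
    // ...while explicit branches keep the Long intact.
    Object kept;
    if (flt) {
      kept = Double.parseDouble("1.5");
    } else {
      kept = Long.parseLong("7");
    }
    System.out.println(kept.getClass().getSimpleName()); // Long
  }
}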
@ -793,7 +793,7 @@ class LongConstValueSource extends ConstNumberSource {
|
|||
|
||||
@Override
|
||||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
return new DocValues() {
|
||||
return new LongDocValues(this) {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return fv;
|
||||
|
@ -814,11 +814,6 @@ class LongConstValueSource extends ConstNumberSource {
|
|||
return dv;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Long.toString(constant);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description();
|
||||
|
@ -901,28 +896,12 @@ abstract class DoubleParser extends NamedParser {
|
|||
@Override
|
||||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
final DocValues vals = source.getValues(context, readerContext);
|
||||
return new DocValues() {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)doubleVal(doc);
|
||||
}
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)doubleVal(doc);
|
||||
}
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)doubleVal(doc);
|
||||
}
|
||||
return new DoubleDocValues(this) {
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return func(doc, vals);
|
||||
}
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Double.toString(doubleVal(doc));
|
||||
}
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return name() + '(' + vals.toString(doc) + ')';
|
||||
}
|
||||
|
@ -966,28 +945,12 @@ abstract class Double2Parser extends NamedParser {
|
|||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
final DocValues aVals = a.getValues(context, readerContext);
|
||||
final DocValues bVals = b.getValues(context, readerContext);
|
||||
return new DocValues() {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)doubleVal(doc);
|
||||
}
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)doubleVal(doc);
|
||||
}
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)doubleVal(doc);
|
||||
}
|
||||
@Override
|
||||
return new DoubleDocValues(this) {
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return func(doc, aVals, bVals);
|
||||
}
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Double.toString(doubleVal(doc));
|
||||
}
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')';
|
||||
}
|
||||
|
|
|
@ -88,6 +88,11 @@ public class ByteFieldSource extends NumericFieldCacheSource<ByteValues> {
|
|||
return description() + '=' + byteVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
return arr[doc]; // TODO: valid?
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -41,7 +41,7 @@ public class ConstValueSource extends ConstNumberSource {
|
|||
|
||||
@Override
|
||||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
return new DocValues() {
|
||||
return new FloatDocValues(this) {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return constant;
|
||||
|
@ -59,13 +59,13 @@ public class ConstValueSource extends ConstNumberSource {
|
|||
return dv;
|
||||
}
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Float.toString(constant);
|
||||
}
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description();
|
||||
}
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
return constant;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -21,14 +21,13 @@ import org.apache.lucene.index.IndexReader.AtomicReaderContext;
|
|||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.solr.search.MutableValueInt;
|
||||
import org.apache.solr.search.MutableValue;
|
||||
import org.apache.solr.search.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
|
||||
class ConstIntDocValues extends DocValues {
|
||||
class ConstIntDocValues extends IntDocValues {
|
||||
final int ival;
|
||||
final float fval;
|
||||
final double dval;
|
||||
|
@ -37,6 +36,7 @@ class ConstIntDocValues extends DocValues {
|
|||
final ValueSource parent;
|
||||
|
||||
ConstIntDocValues(int val, ValueSource parent) {
|
||||
super(parent);
|
||||
ival = val;
|
||||
fval = val;
|
||||
dval = val;
|
||||
|
@ -71,7 +71,7 @@ class ConstIntDocValues extends DocValues {
|
|||
}
|
||||
}
|
||||
|
||||
class ConstDoubleDocValues extends DocValues {
|
||||
class ConstDoubleDocValues extends DoubleDocValues {
|
||||
final int ival;
|
||||
final float fval;
|
||||
final double dval;
|
||||
|
@ -80,6 +80,7 @@ class ConstDoubleDocValues extends DocValues {
|
|||
final ValueSource parent;
|
||||
|
||||
ConstDoubleDocValues(double val, ValueSource parent) {
|
||||
super(parent);
|
||||
ival = (int)val;
|
||||
fval = (float)val;
|
||||
dval = val;
|
||||
|
@ -114,115 +115,6 @@ class ConstDoubleDocValues extends DocValues {
|
|||
}
|
||||
}
|
||||
|
||||
abstract class FloatDocValues extends DocValues {
|
||||
protected final ValueSource vs;
|
||||
|
||||
public FloatDocValues(ValueSource vs) {
|
||||
this.vs = vs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte byteVal(int doc) {
|
||||
return (byte)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short shortVal(int doc) {
|
||||
return (short)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract float floatVal(int doc);
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Float.toString(floatVal(doc));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return vs.description() + '=' + strVal(doc);
|
||||
}
|
||||
}
|
||||
|
||||
abstract class IntDocValues extends DocValues {
|
||||
protected final ValueSource vs;
|
||||
|
||||
public IntDocValues(ValueSource vs) {
|
||||
this.vs = vs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte byteVal(int doc) {
|
||||
return (byte)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short shortVal(int doc) {
|
||||
return (short)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract int intVal(int doc);
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Integer.toString(intVal(doc));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return vs.description() + '=' + strVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueFiller getValueFiller() {
|
||||
return new ValueFiller() {
|
||||
private final MutableValueInt mval = new MutableValueInt();
|
||||
|
||||
@Override
|
||||
public MutableValue getValue() {
|
||||
return mval;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fillValue(int doc) {
|
||||
mval.value = intVal(doc);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* <code>DocFreqValueSource</code> returns the number of documents containing the term.
|
||||
|
|
|
@ -19,6 +19,7 @@ package org.apache.solr.search.function;
|
|||
|
||||
import org.apache.lucene.search.*;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.solr.search.MutableValue;
|
||||
import org.apache.solr.search.MutableValueFloat;
|
||||
|
||||
|
@ -47,6 +48,28 @@ public abstract class DocValues {
|
|||
// TODO: should we make a termVal, returns BytesRef?
|
||||
public String strVal(int doc) { throw new UnsupportedOperationException(); }
|
||||
|
||||
/** returns the bytes representation of the string val - TODO: should this return the indexed raw bytes not? */
|
||||
public boolean bytesVal(int doc, BytesRef target) {
|
||||
String s = strVal(doc);
|
||||
if (s==null) {
|
||||
target.length = 0;
|
||||
return false;
|
||||
}
|
||||
target.copy(s);
|
||||
return true;
|
||||
};
|
||||
|
||||
/** Native Java Object representation of the value */
|
||||
public Object objectVal(int doc) {
|
||||
// most DocValues are functions, so by default return a Float()
|
||||
return floatVal(doc);
|
||||
}
|
||||
|
||||
/** Returns true if there is a value for this document */
|
||||
public boolean exists(int doc) {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param doc The doc to retrieve to sort ordinal for
|
||||
* @return the sort ordinal for the specified doc
|
||||
|
|
|
@ -40,7 +40,7 @@ public class DoubleConstValueSource extends ConstNumberSource {
|
|||
|
||||
@Override
|
||||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
return new DocValues() {
|
||||
return new DoubleDocValues(this) {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return fv;
|
||||
|
@ -66,6 +66,11 @@ public class DoubleConstValueSource extends ConstNumberSource {
|
|||
return Double.toString(constant);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
return constant;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description();
|
||||
|
|
|
@ -0,0 +1,74 @@
|
|||
package org.apache.solr.search.function;
|
||||
|
||||
import org.apache.solr.search.MutableValue;
|
||||
import org.apache.solr.search.MutableValueDouble;
|
||||
|
||||
public abstract class DoubleDocValues extends DocValues {
|
||||
protected final ValueSource vs;
|
||||
|
||||
public DoubleDocValues(ValueSource vs) {
|
||||
this.vs = vs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte byteVal(int doc) {
|
||||
return (byte)doubleVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short shortVal(int doc) {
|
||||
return (short)doubleVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)doubleVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)doubleVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)doubleVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract double doubleVal(int doc);
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Double.toString(doubleVal(doc));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
return exists(doc) ? doubleVal(doc) : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return vs.description() + '=' + strVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueFiller getValueFiller() {
|
||||
return new ValueFiller() {
|
||||
private final MutableValueDouble mval = new MutableValueDouble();
|
||||
|
||||
@Override
|
||||
public MutableValue getValue() {
|
||||
return mval;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fillValue(int doc) {
|
||||
mval.value = doubleVal(doc);
|
||||
mval.exists = exists(doc);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
}
|
|
@ -51,7 +51,7 @@ public class DoubleFieldSource extends NumericFieldCacheSource<DoubleValues> {
|
|||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
final DoubleValues vals = cache.getDoubles(readerContext.reader, field, creator);
|
||||
final double[] arr = vals.values;
|
||||
final Bits valid = vals.valid;
|
||||
final Bits valid = vals.valid;
|
||||
|
||||
return new DocValues() {
|
||||
@Override
|
||||
|
@ -79,6 +79,11 @@ public class DoubleFieldSource extends NumericFieldCacheSource<DoubleValues> {
|
|||
return Double.toString(arr[doc]);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
return valid.get(doc) ? arr[doc] : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + doubleVal(doc);
|
||||
|
|
|
@ -48,28 +48,12 @@ public abstract class DualFloatFunction extends ValueSource {
|
|||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
final DocValues aVals = a.getValues(context, readerContext);
|
||||
final DocValues bVals = b.getValues(context, readerContext);
|
||||
return new DocValues() {
|
||||
return new FloatDocValues(this) {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return func(doc, aVals, bVals);
|
||||
}
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)floatVal(doc);
|
||||
}
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)floatVal(doc);
|
||||
}
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return floatVal(doc);
|
||||
}
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Float.toString(floatVal(doc));
|
||||
}
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')';
|
||||
}
|
||||
|
|
|
@ -65,35 +65,15 @@ public class FileFloatSource extends ValueSource {
|
|||
final int off = offset;
|
||||
|
||||
final float[] arr = getCachedFloats(topLevelContext.reader);
|
||||
return new DocValues() {
|
||||
return new FloatDocValues(this) {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return arr[doc + off];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)arr[doc + off];
|
||||
}
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)arr[doc + off];
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)arr[doc + off];
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Float.toString(arr[doc + off]);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + floatVal(doc);
|
||||
public Object objectVal(int doc) {
|
||||
return floatVal(doc); // TODO: keep track of missing values
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -0,0 +1,73 @@
|
|||
package org.apache.solr.search.function;
|
||||
|
||||
import org.apache.solr.search.MutableValue;
|
||||
import org.apache.solr.search.MutableValueFloat;
|
||||
|
||||
public abstract class FloatDocValues extends DocValues {
|
||||
protected final ValueSource vs;
|
||||
|
||||
public FloatDocValues(ValueSource vs) {
|
||||
this.vs = vs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte byteVal(int doc) {
|
||||
return (byte)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short shortVal(int doc) {
|
||||
return (short)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract float floatVal(int doc);
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)floatVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Float.toString(floatVal(doc));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
return exists(doc) ? floatVal(doc) : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return vs.description() + '=' + strVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueFiller getValueFiller() {
|
||||
return new ValueFiller() {
|
||||
private final MutableValueFloat mval = new MutableValueFloat();
|
||||
|
||||
@Override
|
||||
public MutableValue getValue() {
|
||||
return mval;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fillValue(int doc) {
|
||||
mval.value = floatVal(doc);
|
||||
mval.exists = exists(doc);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
|
@ -50,37 +50,22 @@ public class FloatFieldSource extends NumericFieldCacheSource<FloatValues> {
|
|||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
final FloatValues vals = cache.getFloats(readerContext.reader, field, creator);
|
||||
final float[] arr = vals.values;
|
||||
final Bits valid = vals.valid;
|
||||
final Bits valid = vals.valid;
|
||||
|
||||
return new DocValues() {
|
||||
return new FloatDocValues(this) {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return arr[doc];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)arr[doc];
|
||||
public Object objectVal(int doc) {
|
||||
return valid.get(doc) ? arr[doc] : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)arr[doc];
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)arr[doc];
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Float.toString(arr[doc]);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + floatVal(doc);
|
||||
public boolean exists(int doc) {
|
||||
return valid.get(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -0,0 +1,73 @@
|
|||
package org.apache.solr.search.function;
|
||||
|
||||
import org.apache.solr.search.MutableValue;
|
||||
import org.apache.solr.search.MutableValueInt;
|
||||
|
||||
public abstract class IntDocValues extends DocValues {
|
||||
protected final ValueSource vs;
|
||||
|
||||
public IntDocValues(ValueSource vs) {
|
||||
this.vs = vs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte byteVal(int doc) {
|
||||
return (byte)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short shortVal(int doc) {
|
||||
return (short)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract int intVal(int doc);
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Integer.toString(intVal(doc));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
return exists(doc) ? intVal(doc) : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return vs.description() + '=' + strVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueFiller getValueFiller() {
|
||||
return new ValueFiller() {
|
||||
private final MutableValueInt mval = new MutableValueInt();
|
||||
|
||||
@Override
|
||||
public MutableValue getValue() {
|
||||
return mval;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fillValue(int doc) {
|
||||
mval.value = intVal(doc);
|
||||
mval.exists = exists(doc);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
|
@ -51,9 +51,9 @@ public class IntFieldSource extends NumericFieldCacheSource<IntValues> {
|
|||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
final IntValues vals = cache.getInts(readerContext.reader, field, creator);
|
||||
final int[] arr = vals.values;
|
||||
final Bits valid = vals.valid;
|
||||
final Bits valid = vals.valid;
|
||||
|
||||
return new DocValues() {
|
||||
return new IntDocValues(this) {
|
||||
final MutableValueInt val = new MutableValueInt();
|
||||
|
||||
@Override
|
||||
|
@ -81,6 +81,16 @@ public class IntFieldSource extends NumericFieldCacheSource<IntValues> {
|
|||
return Float.toString(arr[doc]);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
return valid.get(doc) ? arr[doc] : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean exists(int doc) {
|
||||
return valid.get(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + intVal(doc);
|
||||
|
|
|
@ -54,7 +54,7 @@ public class JoinDocFreqValueSource extends FieldCacheSource {
|
|||
final DocTerms terms = cache.getTerms(readerContext.reader, field, true );
|
||||
final IndexReader top = ReaderUtil.getTopLevelContext(readerContext).reader;
|
||||
|
||||
return new DocValues() {
|
||||
return new IntDocValues(this) {
|
||||
BytesRef ref = new BytesRef();
|
||||
|
||||
@Override
|
||||
|
@ -70,31 +70,6 @@ public class JoinDocFreqValueSource extends FieldCacheSource {
|
|||
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in function "+description()+" : doc="+doc, e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return intVal(doc) + "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + intVal(doc);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -50,28 +50,12 @@ public class LinearFloatFunction extends ValueSource {
|
|||
@Override
|
||||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
final DocValues vals = source.getValues(context, readerContext);
|
||||
return new DocValues() {
|
||||
return new FloatDocValues(this) {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return vals.floatVal(doc) * slope + intercept;
|
||||
}
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)floatVal(doc);
|
||||
}
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)floatVal(doc);
|
||||
}
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)floatVal(doc);
|
||||
}
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Float.toString(floatVal(doc));
|
||||
}
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return slope + "*float(" + vals.toString(doc) + ")+" + intercept;
|
||||
}
|
||||
|
|
|
@ -17,19 +17,23 @@ package org.apache.solr.search.function;
|
|||
*/
|
||||
|
||||
import org.apache.lucene.index.IndexReader.AtomicReaderContext;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
|
||||
import java.util.Map;
|
||||
import java.io.IOException;
|
||||
|
||||
|
||||
/**
|
||||
* Pass a the field value through as a String, no matter the type
|
||||
* Pass a the field value through as a String, no matter the type // Q: doesn't this mean it's a "string"?
|
||||
*
|
||||
**/
|
||||
public class LiteralValueSource extends ValueSource {
|
||||
protected final String string;
|
||||
protected final BytesRef bytesRef;
|
||||
|
||||
public LiteralValueSource(String string) {
|
||||
this.string = string;
|
||||
this.bytesRef = new BytesRef(string);
|
||||
}
|
||||
|
||||
/** returns the literal value */
|
||||
|
@ -40,12 +44,18 @@ public class LiteralValueSource extends ValueSource {
|
|||
@Override
|
||||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
|
||||
return new DocValues() {
|
||||
return new StrDocValues(this) {
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return string;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean bytesVal(int doc, BytesRef target) {
|
||||
target.copy(bytesRef);
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return string;
|
||||
|
|
|
@ -0,0 +1,73 @@
|
|||
package org.apache.solr.search.function;
|
||||
|
||||
import org.apache.solr.search.MutableValue;
|
||||
import org.apache.solr.search.MutableValueLong;
|
||||
|
||||
public abstract class LongDocValues extends DocValues {
|
||||
protected final ValueSource vs;
|
||||
|
||||
public LongDocValues(ValueSource vs) {
|
||||
this.vs = vs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte byteVal(int doc) {
|
||||
return (byte)longVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public short shortVal(int doc) {
|
||||
return (short)longVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)longVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)longVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public abstract long longVal(int doc);
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)longVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Long.toString(longVal(doc));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object objectVal(int doc) {
|
||||
return exists(doc) ? longVal(doc) : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return vs.description() + '=' + strVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueFiller getValueFiller() {
|
||||
return new ValueFiller() {
|
||||
private final MutableValueLong mval = new MutableValueLong();
|
||||
|
||||
@Override
|
||||
public MutableValue getValue() {
|
||||
return mval;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fillValue(int doc) {
|
||||
mval.value = longVal(doc);
|
||||
mval.exists = exists(doc);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
|
@ -52,41 +52,30 @@ public class LongFieldSource extends NumericFieldCacheSource<LongValues> {
|
|||
return Long.parseLong(extVal);
|
||||
}
|
||||
|
||||
public Object longToObject(long val) {
|
||||
return val;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
|
||||
final LongValues vals = cache.getLongs(readerContext.reader, field, creator);
|
||||
final long[] arr = vals.values;
|
||||
final Bits valid = vals.valid;
|
||||
final Bits valid = vals.valid;
|
||||
|
||||
return new DocValues() {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float) arr[doc];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int) arr[doc];
|
||||
}
|
||||
|
||||
return new LongDocValues(this) {
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return arr[doc];
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return arr[doc];
|
||||
public boolean exists(int doc) {
|
||||
return valid.get(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Long.toString(arr[doc]);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + longVal(doc);
|
||||
public Object objectVal(int doc) {
|
||||
return valid.get(doc) ? longToObject(arr[doc]) : null;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -142,8 +131,6 @@ public class LongFieldSource extends NumericFieldCacheSource<LongValues> {
|
|||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -62,28 +62,12 @@ public abstract class MultiFloatFunction extends ValueSource {
|
|||
valsArr[i] = sources[i].getValues(context, readerContext);
|
||||
}
|
||||
|
||||
return new DocValues() {
|
||||
return new FloatDocValues(this) {
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return func(doc, valsArr);
|
||||
}
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return (int)floatVal(doc);
|
||||
}
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)floatVal(doc);
|
||||
}
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)floatVal(doc);
|
||||
}
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
return Float.toString(floatVal(doc));
|
||||
}
|
||||
@Override
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(name()).append('(');
|
||||
|
|
|
@ -63,53 +63,29 @@ public class OrdFieldSource extends ValueSource {
|
|||
final int off = readerContext.docBase;
|
||||
final IndexReader topReader = ReaderUtil.getTopLevelContext(readerContext).reader;
|
||||
final FieldCache.DocTermsIndex sindex = FieldCache.DEFAULT.getTermsIndex(topReader, field);
|
||||
return new DocValues() {
|
||||
return new IntDocValues(this) {
|
||||
protected String toTerm(String readableValue) {
|
||||
return readableValue;
|
||||
}
|
||||
|
||||
@Override
|
||||
public float floatVal(int doc) {
|
||||
return (float)sindex.getOrd(doc+off);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int intVal(int doc) {
|
||||
return sindex.getOrd(doc+off);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long longVal(int doc) {
|
||||
return (long)sindex.getOrd(doc+off);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double doubleVal(int doc) {
|
||||
return (double)sindex.getOrd(doc+off);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int ordVal(int doc) {
|
||||
return sindex.getOrd(doc+off);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int numOrd() {
|
||||
return sindex.numOrd();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String strVal(int doc) {
|
||||
// the string value of the ordinal, not the string itself
|
||||
return Integer.toString(sindex.getOrd(doc+off));
|
||||
public boolean exists(int doc) {
|
||||
return sindex.getOrd(doc+off) != 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString(int doc) {
|
||||
return description() + '=' + intVal(doc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ValueFiller getValueFiller() {
|
||||
return new ValueFiller() {
|
||||
private final MutableValueInt mval = new MutableValueInt();
|
||||
|
|
|
@ -23,6 +23,8 @@ import org.apache.lucene.search.*;
|
|||
import org.apache.lucene.search.Weight.ScorerContext;
|
||||
import org.apache.lucene.util.ReaderUtil;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.search.MutableValue;
|
||||
import org.apache.solr.search.MutableValueFloat;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
@ -49,7 +51,7 @@ public class QueryValueSource extends ValueSource {
|
|||
|
||||
@Override
|
||||
public DocValues getValues(Map fcontext, AtomicReaderContext readerContext) throws IOException {
|
||||
return new QueryDocValues(readerContext, q, defVal, fcontext);
|
||||
return new QueryDocValues(this, readerContext, fcontext);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -72,31 +74,31 @@ public class QueryValueSource extends ValueSource {
}


class QueryDocValues extends DocValues {
  final Query q;
  // final IndexReader reader;
class QueryDocValues extends FloatDocValues {
  final AtomicReaderContext readerContext;
  final Weight weight;
  final float defVal;
  final Map fcontext;
  final Query q;

  Scorer scorer;
  int scorerDoc; // the document the scorer is on
  boolean noMatches=false;

  // the last document requested... start off with high value
  // to trigger a scorer reset on first access.
  int lastDocRequested=Integer.MAX_VALUE;


  public QueryDocValues(AtomicReaderContext readerContext, Query q, float defVal, Map fcontext) throws IOException {
    IndexReader reader = readerContext.reader;
  public QueryDocValues(QueryValueSource vs, AtomicReaderContext readerContext, Map fcontext) throws IOException {
    super(vs);

    this.readerContext = readerContext;
    this.q = q;
    this.defVal = defVal;
    this.defVal = vs.defVal;
    this.q = vs.q;
    this.fcontext = fcontext;

    Weight w = fcontext==null ? null : (Weight)fcontext.get(q);
    // TODO: sort by function doesn't weight (SOLR-1297 is open because of this bug)... so weightSearcher will currently be null
    if (w == null) {
      IndexSearcher weightSearcher;
      if(fcontext == null) {
@@ -116,8 +118,12 @@ class QueryDocValues extends DocValues {
  public float floatVal(int doc) {
    try {
      if (doc < lastDocRequested) {
        if (noMatches) return defVal;
        scorer = weight.scorer(readerContext, ScorerContext.def());
        if (scorer==null) return defVal;
        if (scorer==null) {
          noMatches = true;
          return defVal;
        }
        scorerDoc = -1;
      }
      lastDocRequested = doc;
@@ -137,24 +143,104 @@ class QueryDocValues extends DocValues {
    } catch (IOException e) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in QueryDocVals("+q+") doc="+doc, e);
    }
  }
  }

  @Override
  public int intVal(int doc) {
    return (int)floatVal(doc);
  public boolean exists(int doc) {
    try {
      if (doc < lastDocRequested) {
        if (noMatches) return false;
        scorer = weight.scorer(readerContext, ScorerContext.def());
        scorerDoc = -1;
        if (scorer==null) {
          noMatches = true;
          return false;
        }
      }
      lastDocRequested = doc;

      if (scorerDoc < doc) {
        scorerDoc = scorer.advance(doc);
      }

      if (scorerDoc > doc) {
        // query doesn't match this document... either because we hit the
        // end, or because the next doc is after this doc.
        return false;
      }

      // a match!
      return true;
    } catch (IOException e) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in QueryDocVals("+q+") doc="+doc, e);
    }
  }

  @Override
  public Object objectVal(int doc) {
    try {
      return exists(doc) ? scorer.score() : null;
    } catch (IOException e) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in QueryDocVals("+q+") doc="+doc, e);
    }
  }

  @Override
  public long longVal(int doc) {
    return (long)floatVal(doc);
  }
  @Override
  public double doubleVal(int doc) {
    return (double)floatVal(doc);
  }
  @Override
  public String strVal(int doc) {
    return Float.toString(floatVal(doc));
  public ValueFiller getValueFiller() {
    //
    // TODO: if we want to support more than one value-filler or a value-filler in conjunction with
    // the DocValues, then members like "scorer" should be per ValueFiller instance.
    // Or we can say that the user should just instantiate multiple DocValues.
    //
    return new ValueFiller() {
      private final MutableValueFloat mval = new MutableValueFloat();

      @Override
      public MutableValue getValue() {
        return mval;
      }

      @Override
      public void fillValue(int doc) {
        try {
          if (noMatches) {
            mval.value = defVal;
            mval.exists = false;
            return;
          }
          scorer = weight.scorer(readerContext, ScorerContext.def());
          scorerDoc = -1;
          if (scorer==null) {
            noMatches = true;
            mval.value = defVal;
            mval.exists = false;
            return;
          }
          lastDocRequested = doc;

          if (scorerDoc < doc) {
            scorerDoc = scorer.advance(doc);
          }

          if (scorerDoc > doc) {
            // query doesn't match this document... either because we hit the
            // end, or because the next doc is after this doc.
            mval.value = defVal;
            mval.exists = false;
            return;
          }

          // a match!
          mval.value = scorer.score();
          mval.exists = true;
          return;
        } catch (IOException e) {
          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in QueryDocVals("+q+") doc="+doc, e);
        }
      }
    };
  }

  @Override
  public String toString(int doc) {
    return "query(" + q + ",def=" + defVal + ")=" + floatVal(doc);
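floatVal, exists, and fillValue above all share the same forward-only scorer pattern: a Scorer can only advance, so when a document smaller than the last one requested comes in, a fresh scorer is obtained; otherwise the cached one is advanced to the target doc. The sketch below isolates that pattern; ScorerLike and the surrounding names are illustrative stand-ins, not the Lucene API, and the noMatches/null-scorer handling from the diff is omitted for brevity.

// Illustrative sketch of the forward-only scorer pattern shared by floatVal(),
// exists() and fillValue() above. ScorerLike stands in for Lucene's Scorer.
final class ForwardOnlyLookupSketch {
  interface ScorerLike {
    int advance(int target) throws java.io.IOException; // first matching doc >= target
    float score() throws java.io.IOException;
  }

  private final java.util.function.Supplier<ScorerLike> newScorer;
  private ScorerLike scorer;
  private int scorerDoc = -1;
  private int lastDocRequested = Integer.MAX_VALUE; // forces a reset on first access

  ForwardOnlyLookupSketch(java.util.function.Supplier<ScorerLike> newScorer) {
    this.newScorer = newScorer;
  }

  float value(int doc, float defVal) throws java.io.IOException {
    if (doc < lastDocRequested) {      // going backwards: must start a new scorer
      scorer = newScorer.get();
      scorerDoc = -1;
    }
    lastDocRequested = doc;
    if (scorerDoc < doc) {
      scorerDoc = scorer.advance(doc); // forward-only advance
    }
    return scorerDoc == doc ? scorer.score() : defVal; // scorer past doc => no match
  }
}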
@@ -54,29 +54,13 @@ public class RangeMapFloatFunction extends ValueSource {
  @Override
  public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final DocValues vals = source.getValues(context, readerContext);
    return new DocValues() {
    return new FloatDocValues(this) {
      @Override
      public float floatVal(int doc) {
        float val = vals.floatVal(doc);
        return (val>=min && val<=max) ? target : (defaultVal == null ? val : defaultVal);
      }
      @Override
      public int intVal(int doc) {
        return (int)floatVal(doc);
      }
      @Override
      public long longVal(int doc) {
        return (long)floatVal(doc);
      }
      @Override
      public double doubleVal(int doc) {
        return (double)floatVal(doc);
      }
      @Override
      public String strVal(int doc) {
        return Float.toString(floatVal(doc));
      }
      @Override
      public String toString(int doc) {
        return "map(" + vals.toString(doc) + ",min=" + min + ",max=" + max + ",target=" + target + ")";
      }
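RangeMapFloatFunction backs the map() function source: values falling inside [min, max] are replaced by target, everything else is passed through unless an explicit default is supplied. A small self-contained sketch of that rule, lifted from floatVal() above; the helper name and the example inputs are illustrative, not the Solr API.

// Illustrative helper reproducing the map() rule: values inside [min, max]
// become target; others keep their value unless an explicit default is given.
final class RangeMapSketch {
  static float rangeMap(float val, float min, float max, float target, Float defaultVal) {
    return (val >= min && val <= max) ? target : (defaultVal == null ? val : defaultVal);
  }

  public static void main(String[] args) {
    // e.g. map(x,0,0,1): turn a zero value into 1, leave everything else alone
    System.out.println(rangeMap(0.0f, 0.0f, 0.0f, 1.0f, null)); // 1.0
    System.out.println(rangeMap(3.5f, 0.0f, 0.0f, 1.0f, null)); // 3.5
  }
}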
@@ -60,28 +60,12 @@ public class ReciprocalFloatFunction extends ValueSource {
  @Override
  public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final DocValues vals = source.getValues(context, readerContext);
    return new DocValues() {
    return new FloatDocValues(this) {
      @Override
      public float floatVal(int doc) {
        return a/(m*vals.floatVal(doc) + b);
      }
      @Override
      public int intVal(int doc) {
        return (int)floatVal(doc);
      }
      @Override
      public long longVal(int doc) {
        return (long)floatVal(doc);
      }
      @Override
      public double doubleVal(int doc) {
        return (double)floatVal(doc);
      }
      @Override
      public String strVal(int doc) {
        return Float.toString(floatVal(doc));
      }
      @Override
      public String toString(int doc) {
        return Float.toString(a) + "/("
          + m + "*float(" + vals.toString(doc) + ')'
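ReciprocalFloatFunction computes a/(m*x + b) over the wrapped source x; with a equal to b and m equal to 1 the result starts at 1 and decays toward 0 as x grows, which is the usual shape for recency-style boosts. A quick worked sketch of that behaviour; the constants are example inputs only.

// Example inputs only: evaluate recip(x, m, a, b) = a / (m*x + b)
// to show the decay from 1 toward 0 when a == b and m == 1.
final class RecipSketch {
  static float recip(float x, float m, float a, float b) {
    return a / (m * x + b);
  }

  public static void main(String[] args) {
    float m = 1f, a = 1000f, b = 1000f;
    for (float x : new float[] {0f, 1000f, 10000f}) {
      System.out.println("recip(" + x + ") = " + recip(x, m, a, b)); // 1.0, 0.5, ~0.09
    }
  }
}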
@@ -66,47 +66,11 @@ public class ReverseOrdFieldSource extends ValueSource {
    final FieldCache.DocTermsIndex sindex = FieldCache.DEFAULT.getTermsIndex(topReader, field);
    final int end = sindex.numOrd();

    return new DocValues() {
      @Override
      public float floatVal(int doc) {
        return (float)(end - sindex.getOrd(doc+off));
      }

      @Override
    return new IntDocValues(this) {
      @Override
      public int intVal(int doc) {
        return (end - sindex.getOrd(doc+off));
      }

      @Override
      public long longVal(int doc) {
        return (long)(end - sindex.getOrd(doc+off));
      }

      @Override
      public int ordVal(int doc) {
        return (end - sindex.getOrd(doc+off));
      }

      @Override
      public int numOrd() {
        return end;
      }

      @Override
      public double doubleVal(int doc) {
        return (double)(end - sindex.getOrd(doc+off));
      }

      @Override
      public String strVal(int doc) {
        // the string value of the ordinal, not the string itself
        return Integer.toString((end - sindex.getOrd(doc+off)));
      }

      @Override
      public String toString(int doc) {
        return description() + '=' + strVal(doc);
      }
    };
  }

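ReverseOrdFieldSource returns end - ord, so the document with the lowest term ordinal gets the highest value; this is what lets rord() behave like a descending version of ord(). A tiny illustration with hypothetical ordinals, not FieldCache output:

// Hypothetical ordinals only: rord = numOrd - ord inverts the ordering.
final class ReverseOrdSketch {
  public static void main(String[] args) {
    int numOrd = 5;             // "end" in the diff: number of distinct ordinals
    int[] ords = {1, 2, 3, 4};  // ascending term ordinals for four docs
    for (int ord : ords) {
      System.out.println("ord=" + ord + " rord=" + (numOrd - ord)); // 4, 3, 2, 1
    }
  }
}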
@@ -108,28 +108,12 @@ public class ScaleFloatFunction extends ValueSource {

    final DocValues vals = source.getValues(context, readerContext);

    return new DocValues() {
    return new FloatDocValues(this) {
      @Override
      public float floatVal(int doc) {
        return (vals.floatVal(doc) - minSource) * scale + min;
      }
      @Override
      public int intVal(int doc) {
        return (int)floatVal(doc);
      }
      @Override
      public long longVal(int doc) {
        return (long)floatVal(doc);
      }
      @Override
      public double doubleVal(int doc) {
        return (double)floatVal(doc);
      }
      @Override
      public String strVal(int doc) {
        return Float.toString(floatVal(doc));
      }
      @Override
      public String toString(int doc) {
        return "scale(" + vals.toString(doc) + ",toMin=" + min + ",toMax=" + max
          + ",fromMin=" + minSource
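ScaleFloatFunction linearly rescales the wrapped source from its observed [minSource, maxSource] range into a target [min, max] range; floatVal above applies a precomputed scale factor, presumably (max - min) / (maxSource - minSource). A tiny worked sketch of that linear mapping, standalone and with example inputs rather than the Solr class itself:

// Standalone sketch of the linear rescaling in scale(): maps a value from
// [minSource, maxSource] into [min, max]. Example inputs only.
final class ScaleSketch {
  static float scale(float val, float minSource, float maxSource, float min, float max) {
    float factor = (max - min) / (maxSource - minSource); // assumed precomputed once per reader
    return (val - minSource) * factor + min;
  }

  public static void main(String[] args) {
    // rescale values observed in [2, 10] onto [0, 1]
    System.out.println(scale(2f, 2f, 10f, 0f, 1f));  // 0.0
    System.out.println(scale(6f, 2f, 10f, 0f, 1f));  // 0.5
    System.out.println(scale(10f, 2f, 10f, 0f, 1f)); // 1.0
  }
}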
@@ -34,28 +34,12 @@ import java.util.Map;
  @Override
  public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
    final DocValues vals = source.getValues(context, readerContext);
    return new DocValues() {
    return new FloatDocValues(this) {
      @Override
      public float floatVal(int doc) {
        return func(doc, vals);
      }
      @Override
      public int intVal(int doc) {
        return (int)floatVal(doc);
      }
      @Override
      public long longVal(int doc) {
        return (long)floatVal(doc);
      }
      @Override
      public double doubleVal(int doc) {
        return (double)floatVal(doc);
      }
      @Override
      public String strVal(int doc) {
        return Float.toString(floatVal(doc));
      }
      @Override
      public String toString(int doc) {
        return name() + '(' + vals.toString(doc) + ')';
      }
Some files were not shown because too many files have changed in this diff.