LUCENE-3023: merged with trunk

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/realtime_search@1096120 13f79535-47bb-0310-9956-ffa450edef68
Simon Willnauer 2011-04-23 08:07:28 +00:00
commit 990a5d6f3a
119 changed files with 1490 additions and 956 deletions

View File

@@ -19,7 +19,7 @@
 <project name="lucene-solr" default="test" basedir=".">
   <import file="common-build.xml"/>
-  <target name="test" description="Test both Lucene and Solr">
+  <target name="test" description="Test both Lucene and Solr" depends="validate">
     <sequential>
       <subant target="test" inheritall="false" failonerror="true">
         <fileset dir="lucene" includes="build.xml" />
@@ -35,7 +35,7 @@
       <fileset dir="solr" includes="build.xml" />
     </subant></sequential>
   </target>
-  <target name="compile" depends="validate" description="Compile Lucene and Solr">
+  <target name="compile" description="Compile Lucene and Solr">
     <sequential>
       <subant target="compile" inheritall="false" failonerror="true">

View File

@@ -387,8 +387,8 @@ Test Cases
 Build
-* LUCENE-3006: Building javadocs will fail on warnings by default. Override with -Dfailonjavadocwarning=false (sarowe, gsingers)
+* LUCENE-3006: Building javadocs will fail on warnings by default.
+  Override with -Dfailonjavadocwarning=false (sarowe, gsingers)
 ======================= Lucene 3.x (not yet released) =======================
@@ -410,6 +410,14 @@ Bug fixes
   seeking TermEnum (eg used by Solr's faceting) (Tom Burton-West, Mike
   McCandless)
+* LUCENE-3042: When a filter or consumer added Attributes to a TokenStream
+  chain after it was already (partly) consumed [or clearAttributes(),
+  captureState(), cloneAttributes(),... was called by the Tokenizer],
+  the Tokenizer calling clearAttributes() or capturing state after addition
+  may not do this on the newly added Attribute. This bug affected only
+  very special use cases of the TokenStream-API, most users would not
+  have recognized it. (Uwe Schindler, Robert Muir)
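
To make the LUCENE-3042 entry concrete, here is a minimal sketch (my illustration, not code from this commit) of the affected pattern: an Attribute added after the source's state chain was already computed must still be visited by clearAttributes().

    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
    import org.apache.lucene.util.AttributeSource;

    public class Lucene3042Sketch {
      public static void main(String[] args) {
        AttributeSource src = new AttributeSource();
        src.addAttribute(CharTermAttribute.class).append("foo");
        int hash = src.hashCode();             // computes and caches the state chain
        src.addAttribute(TypeAttribute.class); // must invalidate that cached chain
        // Before the fix, clearAttributes()/captureState() could walk the stale
        // chain and skip the newly added TypeAttribute.
        src.clearAttributes();
        System.out.println(hash != src.hashCode()); // true: the chain was rebuilt
      }
    }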
 ======================= Lucene 3.1.0 =======================
 Changes in backwards compatibility policy

View File

@@ -308,7 +308,7 @@
     </copy>
   </target>
-  <target name="compile" depends="compile-core, validate-lucene">
+  <target name="compile" depends="compile-core">
     <!-- convenience target to compile core -->
   </target>
@@ -565,7 +565,7 @@
     </sequential>
   </macrodef>
-  <target name="test" depends="compile-test,junit-mkdir,junit-sequential,junit-parallel" description="Runs unit tests"/>
+  <target name="test" depends="compile-test,validate-lucene,junit-mkdir,junit-sequential,junit-parallel" description="Runs unit tests"/>
   <target name="junit-mkdir">
     <mkdir dir="${junit.output.dir}"/>

View File

@@ -45,10 +45,15 @@ API Changes
 ======================= Lucene 3.x (not yet released) =======================
-Bug fixes
+Bug Fixes
 * LUCENE-3026: SmartChineseAnalyzer's WordTokenFilter threw NullPointerException
   on sentences longer than 32,767 characters. (wangzhenghang via Robert Muir)
+* LUCENE-2939: Highlighter should try and use maxDocCharsToAnalyze in
+  WeightedSpanTermExtractor when adding a new field to MemoryIndex as well as
+  when using CachingTokenStream. This can be a significant performance bug for
+  large documents. (Mark Miller)
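
As a usage sketch of the API this fix affects (field name "body" and the caller-supplied query/analyzer/text are assumptions, not from this commit): the limit set below is what LUCENE-2939 now propagates into WeightedSpanTermExtractor.

    import java.io.IOException;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.highlight.Highlighter;
    import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
    import org.apache.lucene.search.highlight.QueryScorer;
    import org.apache.lucene.search.highlight.SimpleHTMLFormatter;

    public class HighlightSketch {
      // Highlights `text`, analyzing at most maxChars characters of it.
      static String highlight(Query query, Analyzer analyzer, String text, int maxChars)
          throws IOException, InvalidTokenOffsetsException {
        QueryScorer scorer = new QueryScorer(query, "body");
        Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(), scorer);
        highlighter.setMaxDocCharsToAnalyze(maxChars); // now honored inside QueryScorer too
        return highlighter.getBestFragment(analyzer, "body", text);
      }
    }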
 New Features

View File

@@ -0,0 +1,2 @@
+AnyObjectId[9a9ff077cdd36a96e7e0506986edd4e52b90a22f] was removed in git history.
+Apache SVN contains full history.

View File

@@ -0,0 +1 @@
+No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking.

View File

@@ -0,0 +1 @@
+No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking.

View File

@@ -0,0 +1 @@
+No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking.

View File

@@ -0,0 +1,2 @@
+AnyObjectId[99baf20bacd712cae91dd6e4e1f46224cafa1a37] was removed in git history.
+Apache SVN contains full history.

View File

@@ -0,0 +1 @@
+No bdb jars are shipped with lucene. This is a fake license to work around the automated license checking.

View File

@@ -197,6 +197,11 @@ public class Highlighter
       tokenStream.reset();
       TextFragment currentFrag = new TextFragment(newText,newText.length(), docFrags.size());
+
+      if (fragmentScorer instanceof QueryScorer) {
+        ((QueryScorer) fragmentScorer).setMaxDocCharsToAnalyze(maxDocCharsToAnalyze);
+      }
+
       TokenStream newStream = fragmentScorer.init(tokenStream);
       if(newStream != null) {
         tokenStream = newStream;

View File

@@ -0,0 +1 @@
+package org.apache.lucene.search.highlight;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
+
+/**
+ * This TokenFilter limits the number of tokens while indexing by adding up the
+ * current offset.
+ */
+public final class OffsetLimitTokenFilter extends TokenFilter {
+
+  private int offsetCount;
+  private OffsetAttribute offsetAttrib = getAttribute(OffsetAttribute.class);
+  private int offsetLimit;
+
+  public OffsetLimitTokenFilter(TokenStream input, int offsetLimit) {
+    super(input);
+    this.offsetLimit = offsetLimit;
+  }
+
+  @Override
+  public boolean incrementToken() throws IOException {
+    if (offsetCount < offsetLimit && input.incrementToken()) {
+      int offsetLength = offsetAttrib.endOffset() - offsetAttrib.startOffset();
+      offsetCount += offsetLength;
+      return true;
+    }
+    return false;
+  }
+
+  @Override
+  public void reset() throws IOException {
+    super.reset();
+    offsetCount = 0;
+  }
+}

View File

@@ -54,6 +54,7 @@ public class QueryScorer implements Scorer {
   private IndexReader reader;
   private boolean skipInitExtractor;
   private boolean wrapToCaching = true;
+  private int maxCharsToAnalyze;

   /**
    * @param query Query to use for highlighting
@@ -209,7 +210,7 @@ public class QueryScorer implements Scorer {
   private TokenStream initExtractor(TokenStream tokenStream) throws IOException {
     WeightedSpanTermExtractor qse = defaultField == null ? new WeightedSpanTermExtractor()
         : new WeightedSpanTermExtractor(defaultField);
+    qse.setMaxDocCharsToAnalyze(maxCharsToAnalyze);
     qse.setExpandMultiTermQuery(expandMultiTermQuery);
     qse.setWrapIfNotCachingTokenFilter(wrapToCaching);
     if (reader == null) {
@@ -265,4 +266,8 @@ public class QueryScorer implements Scorer {
   public void setWrapIfNotCachingTokenFilter(boolean wrap) {
     this.wrapToCaching = wrap;
   }
+
+  public void setMaxDocCharsToAnalyze(int maxDocCharsToAnalyze) {
+    this.maxCharsToAnalyze = maxDocCharsToAnalyze;
+  }
 }

View File

@@ -56,6 +56,7 @@ public class WeightedSpanTermExtractor {
   private boolean expandMultiTermQuery;
   private boolean cachedTokenStream;
   private boolean wrapToCaching = true;
+  private int maxDocCharsToAnalyze;

   public WeightedSpanTermExtractor() {
   }
@@ -320,13 +321,13 @@ public class WeightedSpanTermExtractor {
   private AtomicReaderContext getLeafContextForField(String field) throws IOException {
     if(wrapToCaching && !cachedTokenStream && !(tokenStream instanceof CachingTokenFilter)) {
-      tokenStream = new CachingTokenFilter(tokenStream);
+      tokenStream = new CachingTokenFilter(new OffsetLimitTokenFilter(tokenStream, maxDocCharsToAnalyze));
       cachedTokenStream = true;
     }
     AtomicReaderContext context = readers.get(field);
     if (context == null) {
       MemoryIndex indexer = new MemoryIndex();
-      indexer.addField(field, tokenStream);
+      indexer.addField(field, new OffsetLimitTokenFilter(tokenStream, maxDocCharsToAnalyze));
       tokenStream.reset();
       IndexSearcher searcher = indexer.createSearcher();
       // MEM index has only atomic ctx
@@ -545,4 +546,8 @@ public class WeightedSpanTermExtractor {
   public void setWrapIfNotCachingTokenFilter(boolean wrap) {
     this.wrapToCaching = wrap;
   }
+
+  protected final void setMaxDocCharsToAnalyze(int maxDocCharsToAnalyze) {
+    this.maxDocCharsToAnalyze = maxDocCharsToAnalyze;
+  }
 }

View File

@@ -0,0 +1 @@
+package org.apache.lucene.search.highlight;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.Reader;
+import java.io.StringReader;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.TokenStream;
+
+public class OffsetLimitTokenFilterTest extends BaseTokenStreamTestCase {
+
+  public void testFilter() throws Exception {
+    TokenStream stream = new MockTokenizer(new StringReader(
+        "short toolong evenmuchlongertext a ab toolong foo"),
+        MockTokenizer.WHITESPACE, false);
+    OffsetLimitTokenFilter filter = new OffsetLimitTokenFilter(stream, 10);
+    assertTokenStreamContents(filter, new String[] {"short", "toolong"});
+
+    stream = new MockTokenizer(new StringReader(
+        "short toolong evenmuchlongertext a ab toolong foo"),
+        MockTokenizer.WHITESPACE, false);
+    filter = new OffsetLimitTokenFilter(stream, 12);
+    assertTokenStreamContents(filter, new String[] {"short", "toolong"});
+
+    stream = new MockTokenizer(new StringReader(
+        "short toolong evenmuchlongertext a ab toolong foo"),
+        MockTokenizer.WHITESPACE, false);
+    filter = new OffsetLimitTokenFilter(stream, 30);
+    assertTokenStreamContents(filter, new String[] {"short", "toolong",
+        "evenmuchlongertext"});
+
+    checkOneTermReuse(new Analyzer() {
+      @Override
+      public TokenStream tokenStream(String fieldName, Reader reader) {
+        return new OffsetLimitTokenFilter(new MockTokenizer(reader,
+            MockTokenizer.WHITESPACE, false), 10);
+      }
+    }, "llenges", "llenges");
+  }
+}

View File

@@ -184,9 +184,21 @@ public abstract class MultiLevelSkipListReader {
     }
   }

+  /** returns x == 0 ? 0 : Math.floor(Math.log(x) / Math.log(base)) */
+  static int log(int x, int base) {
+    assert base >= 2;
+    int ret = 0;
+    long n = base; // needs to be a long to avoid overflow
+    while (x >= n) {
+      n *= base;
+      ret++;
+    }
+    return ret;
+  }
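
Why the integer loop: a quick check (my illustration, not from the patch) of the floating-point formula it replaces. The exact value can vary by platform, but a typical JVM prints 2.0 here even though log10(1000) is exactly 3:

    public class LogCheck {
      public static void main(String[] args) {
        // Math.log(1000) / Math.log(10) evaluates to 2.9999999999999996,
        // so floor() yields 2 where the skip-list code needs 3.
        System.out.println(Math.floor(Math.log(1000) / Math.log(10)));
      }
    }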
   /** Loads the skip levels  */
   private void loadSkipLevels() throws IOException {
-    numberOfSkipLevels = docCount == 0 ? 0 : (int) Math.floor(Math.log(docCount) / Math.log(skipInterval[0]));
+    numberOfSkipLevels = log(docCount, skipInterval[0]);
     if (numberOfSkipLevels > maxNumberOfSkipLevels) {
       numberOfSkipLevels = maxNumberOfSkipLevels;
     }

View File

@@ -61,7 +61,7 @@ public abstract class MultiLevelSkipListWriter {
     this.skipInterval = skipInterval;

     // calculate the maximum number of skip levels for this document frequency
-    numberOfSkipLevels = df == 0 ? 0 : (int) Math.floor(Math.log(df) / Math.log(skipInterval));
+    numberOfSkipLevels = MultiLevelSkipListReader.log(df, skipInterval);

     // make sure it does not exceed maxSkipLevels
     if (numberOfSkipLevels > maxSkipLevels) {

View File

@@ -93,10 +93,33 @@ public class AttributeSource {
     }
   }

+  /**
+   * This class holds the state of an AttributeSource.
+   * @see #captureState
+   * @see #restoreState
+   */
+  public static final class State implements Cloneable {
+    AttributeImpl attribute;
+    State next;
+
+    @Override
+    public Object clone() {
+      State clone = new State();
+      clone.attribute = (AttributeImpl) attribute.clone();
+
+      if (next != null) {
+        clone.next = (State) next.clone();
+      }
+
+      return clone;
+    }
+  }

   // These two maps must always be in sync!!!
   // So they are private, final and read-only from the outside (read-only iterators)
   private final Map<Class<? extends Attribute>, AttributeImpl> attributes;
   private final Map<Class<? extends AttributeImpl>, AttributeImpl> attributeImpls;
+  private final State[] currentState;

   private AttributeFactory factory;
@@ -116,6 +139,7 @@ public class AttributeSource {
     }
     this.attributes = input.attributes;
     this.attributeImpls = input.attributeImpls;
+    this.currentState = input.currentState;
     this.factory = input.factory;
   }
@@ -125,6 +149,7 @@ public class AttributeSource {
   public AttributeSource(AttributeFactory factory) {
     this.attributes = new LinkedHashMap<Class<? extends Attribute>, AttributeImpl>();
     this.attributeImpls = new LinkedHashMap<Class<? extends AttributeImpl>, AttributeImpl>();
+    this.currentState = new State[1];
     this.factory = factory;
   }
@@ -147,11 +172,8 @@ public class AttributeSource {
    * if one instance implements more than one Attribute interface.
    */
   public final Iterator<AttributeImpl> getAttributeImplsIterator() {
-    if (hasAttributes()) {
-      if (currentState == null) {
-        computeCurrentState();
-      }
-      final State initState = currentState;
+    final State initState = getCurrentState();
+    if (initState != null) {
       return new Iterator<AttributeImpl>() {
         private State state = initState;
@@ -225,7 +247,7 @@ public class AttributeSource {
       // Attribute is a superclass of this interface
       if (!attributes.containsKey(curInterface)) {
         // invalidate state to force recomputation in captureState()
-        this.currentState = null;
+        this.currentState[0] = null;
         attributes.put(curInterface, att);
         attributeImpls.put(clazz, att);
       }
@@ -283,41 +305,21 @@ public class AttributeSource {
     }
     return attClass.cast(attImpl);
   }
-
-  /**
-   * This class holds the state of an AttributeSource.
-   * @see #captureState
-   * @see #restoreState
-   */
-  public static final class State implements Cloneable {
-    AttributeImpl attribute;
-    State next;
-
-    @Override
-    public Object clone() {
-      State clone = new State();
-      clone.attribute = (AttributeImpl) attribute.clone();
-
-      if (next != null) {
-        clone.next = (State) next.clone();
-      }
-
-      return clone;
-    }
-  }
-
-  private State currentState = null;
-
-  private void computeCurrentState() {
-    currentState = new State();
-    State c = currentState;
+
+  private State getCurrentState() {
+    State s = currentState[0];
+    if (s != null || !hasAttributes()) {
+      return s;
+    }
+    State c = s = currentState[0] = new State();
     final Iterator<AttributeImpl> it = attributeImpls.values().iterator();
     c.attribute = it.next();
     while (it.hasNext()) {
       c.next = new State();
       c = c.next;
       c.attribute = it.next();
     }
+    return s;
   }

   /**
@@ -325,13 +327,8 @@ public class AttributeSource {
    * {@link AttributeImpl#clear()} on each Attribute implementation.
    */
   public final void clearAttributes() {
-    if (hasAttributes()) {
-      if (currentState == null) {
-        computeCurrentState();
-      }
-      for (State state = currentState; state != null; state = state.next) {
-        state.attribute.clear();
-      }
+    for (State state = getCurrentState(); state != null; state = state.next) {
+      state.attribute.clear();
     }
   }
@@ -340,14 +337,8 @@ public class AttributeSource {
    * {@link #restoreState} to restore the state of this or another AttributeSource.
    */
   public final State captureState() {
-    if (!hasAttributes()) {
-      return null;
-    }
-    if (currentState == null) {
-      computeCurrentState();
-    }
-    return (State) this.currentState.clone();
+    final State state = this.getCurrentState();
+    return (state == null) ? null : (State) state.clone();
   }

   /**
@@ -382,15 +373,9 @@ public class AttributeSource {
   @Override
   public int hashCode() {
     int code = 0;
-    if (hasAttributes()) {
-      if (currentState == null) {
-        computeCurrentState();
-      }
-      for (State state = currentState; state != null; state = state.next) {
-        code = code * 31 + state.attribute.hashCode();
-      }
+    for (State state = getCurrentState(); state != null; state = state.next) {
+      code = code * 31 + state.attribute.hashCode();
     }
     return code;
   }
@@ -413,14 +398,8 @@ public class AttributeSource {
       }

       // it is only equal if all attribute impls are the same in the same order
-      if (this.currentState == null) {
-        this.computeCurrentState();
-      }
-      State thisState = this.currentState;
-      if (other.currentState == null) {
-        other.computeCurrentState();
-      }
-      State otherState = other.currentState;
+      State thisState = this.getCurrentState();
+      State otherState = other.getCurrentState();
       while (thisState != null && otherState != null) {
         if (otherState.attribute.getClass() != thisState.attribute.getClass() || !otherState.attribute.equals(thisState.attribute)) {
           return false;
@@ -473,13 +452,8 @@ public class AttributeSource {
    * @see AttributeImpl#reflectWith
    */
   public final void reflectWith(AttributeReflector reflector) {
-    if (hasAttributes()) {
-      if (currentState == null) {
-        computeCurrentState();
-      }
-      for (State state = currentState; state != null; state = state.next) {
-        state.attribute.reflectWith(reflector);
-      }
+    for (State state = getCurrentState(); state != null; state = state.next) {
+      state.attribute.reflectWith(reflector);
     }
   }
@@ -495,10 +469,7 @@ public class AttributeSource {
     if (hasAttributes()) {
       // first clone the impls
-      if (currentState == null) {
-        computeCurrentState();
-      }
-      for (State state = currentState; state != null; state = state.next) {
+      for (State state = getCurrentState(); state != null; state = state.next) {
         clone.attributeImpls.put(state.attribute.getClass(), (AttributeImpl) state.attribute.clone());
       }
@@ -520,18 +491,13 @@ public class AttributeSource {
    * {@link #cloneAttributes} instead of {@link #captureState}.
    */
   public final void copyTo(AttributeSource target) {
-    if (hasAttributes()) {
-      if (currentState == null) {
-        computeCurrentState();
-      }
-      for (State state = currentState; state != null; state = state.next) {
-        final AttributeImpl targetImpl = target.attributeImpls.get(state.attribute.getClass());
-        if (targetImpl == null) {
-          throw new IllegalArgumentException("This AttributeSource contains AttributeImpl of type " +
-            state.attribute.getClass().getName() + " that is not in the target");
-        }
-        state.attribute.copyTo(targetImpl);
+    for (State state = getCurrentState(); state != null; state = state.next) {
+      final AttributeImpl targetImpl = target.attributeImpls.get(state.attribute.getClass());
+      if (targetImpl == null) {
+        throw new IllegalArgumentException("This AttributeSource contains AttributeImpl of type " +
+          state.attribute.getClass().getName() + " that is not in the target");
       }
+      state.attribute.copyTo(targetImpl);
     }
   }
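
For orientation, a small sketch (mine, not part of the commit) of the capture/restore cycle that now runs through the lazily cached chain returned by getCurrentState():

    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.util.AttributeSource;

    public class CaptureRestoreSketch {
      public static void main(String[] args) {
        AttributeSource src = new AttributeSource();
        CharTermAttribute term = src.addAttribute(CharTermAttribute.class);
        term.append("foo");
        AttributeSource.State saved = src.captureState(); // clones the cached chain
        src.clearAttributes();                            // walks the same chain
        src.restoreState(saved);                          // copies "foo" back
        System.out.println(term.toString());              // prints: foo
      }
    }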

View File

@@ -117,11 +117,24 @@ public abstract class BaseTokenStreamTestCase extends LuceneTestCase {
       assertEquals("type "+i, types[i], typeAtt.type());
       if (posIncrements != null)
         assertEquals("posIncrement "+i, posIncrements[i], posIncrAtt.getPositionIncrement());
+      // we can enforce some basic things about a few attributes even if the caller doesn't check:
+      if (offsetAtt != null) {
+        assertTrue("startOffset must be >= 0", offsetAtt.startOffset() >= 0);
+        assertTrue("endOffset must be >= 0", offsetAtt.endOffset() >= 0);
+        assertTrue("endOffset must be >= startOffset", offsetAtt.endOffset() >= offsetAtt.startOffset());
+      }
+      if (posIncrAtt != null) {
+        assertTrue("posIncrement must be >= 0", posIncrAtt.getPositionIncrement() >= 0);
+      }
     }
     assertFalse("end of stream", ts.incrementToken());
     ts.end();
     if (finalOffset != null)
       assertEquals("finalOffset ", finalOffset.intValue(), offsetAtt.endOffset());
+    if (offsetAtt != null) {
+      assertTrue("finalOffset must be >= 0", offsetAtt.endOffset() >= 0);
+    }
     ts.close();
   }

View File

@@ -122,7 +122,7 @@ public class MockRandomCodec extends Codec {
   public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
     // we pull this before the seed intentionally: because its not consumed at runtime
     // (the skipInterval is written into postings header)
-    int skipInterval = _TestUtil.nextInt(seedRandom, 2, 64);
+    int skipInterval = _TestUtil.nextInt(seedRandom, 2, 10);
     if (LuceneTestCase.VERBOSE) {
       System.out.println("MockRandomCodec: skipInterval=" + skipInterval);

View File

@@ -128,6 +128,9 @@ public abstract class LuceneTestCase extends Assert {
     TEMP_DIR = new File(s);
     TEMP_DIR.mkdirs();
   }
+
+  /** set of directories we created, in afterclass we try to clean these up */
+  static final Set<String> tempDirs = Collections.synchronizedSet(new HashSet<String>());

   // by default we randomly pick a different codec for
   // each test case (non-J4 tests) and each test class (J4
@@ -323,6 +326,7 @@ public abstract class LuceneTestCase extends Assert {
   public static void beforeClassLuceneTestCaseJ4() {
     staticSeed = "random".equals(TEST_SEED) ? seedRand.nextLong() : TwoLongs.fromString(TEST_SEED).l1;
     random.setSeed(staticSeed);
+    tempDirs.clear();
     stores = Collections.synchronizedMap(new IdentityHashMap<MockDirectoryWrapper,StackTraceElement[]>());
     savedCodecProvider = CodecProvider.getDefault();
     if ("randomPerField".equals(TEST_CODEC)) {
@@ -411,6 +415,16 @@ public abstract class LuceneTestCase extends Assert {
           + "free=" + Runtime.getRuntime().freeMemory() + ","
           + "total=" + Runtime.getRuntime().totalMemory());
     }
+    // clear out any temp directories if we can
+    if (!testsFailed) {
+      for (String path : tempDirs) {
+        try {
+          _TestUtil.rmDir(new File(path));
+        } catch (IOException e) {
+          e.printStackTrace();
+        }
+      }
+    }
   }

   private static boolean testsFailed; /* true if any tests failed */
@@ -1058,6 +1072,7 @@ public abstract class LuceneTestCase extends Assert {
       final File tmpFile = File.createTempFile("test", "tmp", TEMP_DIR);
       tmpFile.delete();
       tmpFile.mkdir();
+      tempDirs.add(tmpFile.getAbsolutePath());
       return newFSDirectoryImpl(clazz.asSubclass(FSDirectory.class), tmpFile, null);
     }

View File

@@ -54,7 +54,9 @@ public class _TestUtil {
   /** Returns temp dir, containing String arg in its name;
    *  does not create the directory. */
   public static File getTempDir(String desc) {
-    return new File(LuceneTestCase.TEMP_DIR, desc + "." + new Random().nextLong());
+    File f = new File(LuceneTestCase.TEMP_DIR, desc + "." + new Random().nextLong());
+    LuceneTestCase.tempDirs.add(f.getAbsolutePath());
+    return f;
   }

   /**
@@ -89,7 +91,8 @@ public class _TestUtil {
     rmDir(destDir);

     destDir.mkdir();
+    LuceneTestCase.tempDirs.add(destDir.getAbsolutePath());

     while (entries.hasMoreElements()) {
       ZipEntry entry = entries.nextElement();

View File

@@ -1,5 +1,6 @@
 package org.apache.lucene.analysis;

+import java.io.StringReader;
 import java.util.Arrays;

 import org.apache.lucene.util.automaton.Automaton;
@@ -95,4 +96,19 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
       new String[] { "ok", "fine" },
       new int[] { 1, 2 });
   }
+
+  public void testLUCENE_3042() throws Exception {
+    String testString = "t";
+
+    Analyzer analyzer = new MockAnalyzer(random);
+    TokenStream stream = analyzer.reusableTokenStream("dummy", new StringReader(testString));
+    stream.reset();
+    while (stream.incrementToken()) {
+      // consume
+    }
+    stream.end();
+
+    assertAnalyzesToReuse(analyzer, testString, new String[] { "t" });
+  }
 }

View File

@@ -57,8 +57,7 @@ public class TestCompoundFile extends LuceneTestCase
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    File file = new File(TEMP_DIR, "testIndex");
-    _TestUtil.rmDir(file);
+    File file = _TestUtil.getTempDir("testIndex");
     // use a simple FSDir here, to be sure to have SimpleFSInputs
     dir = new SimpleFSDirectory(file,null);
   }
@@ -66,7 +65,6 @@ public class TestCompoundFile extends LuceneTestCase
   @Override
   public void tearDown() throws Exception {
     dir.close();
-    _TestUtil.rmDir(new File(TEMP_DIR, "testIndex"));
     super.tearDown();
   }

View File

@@ -36,6 +36,7 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.index.codecs.CodecProvider;
@@ -60,10 +61,10 @@ public class TestDoc extends LuceneTestCase {
     if (VERBOSE) {
       System.out.println("TEST: setUp");
     }
-    workDir = new File(TEMP_DIR,"TestDoc");
+    workDir = _TestUtil.getTempDir("TestDoc");
     workDir.mkdirs();
-    indexDir = new File(workDir, "testIndex");
+    indexDir = _TestUtil.getTempDir("testIndex");
     indexDir.mkdirs();

     Directory directory = newFSDirectory(indexDir);

View File

@@ -286,8 +286,7 @@ public class TestFieldsReader extends LuceneTestCase {
    */
   public void testLazyPerformance() throws Exception {
     String userName = System.getProperty("user.name");
-    File file = new File(TEMP_DIR, "lazyDir" + userName);
-    _TestUtil.rmDir(file);
+    File file = _TestUtil.getTempDir("lazyDir" + userName);
     Directory tmpDir = newFSDirectory(file);
     assertTrue(tmpDir != null);
@@ -473,7 +472,7 @@ public class TestFieldsReader extends LuceneTestCase {
   // LUCENE-1262
   public void testExceptions() throws Throwable {
-    File indexDir = new File(TEMP_DIR, "testfieldswriterexceptions");
+    File indexDir = _TestUtil.getTempDir("testfieldswriterexceptions");
     try {
       Directory dir = new FaultyFSDirectory(indexDir);

View File

@@ -1139,7 +1139,7 @@ public class TestIndexReader extends LuceneTestCase
     }

     public void testOpenReaderAfterDelete() throws IOException {
-      File dirFile = new File(TEMP_DIR, "deletetest");
+      File dirFile = _TestUtil.getTempDir("deletetest");
       Directory dir = newFSDirectory(dirFile);
       try {
         IndexReader.open(dir, false);

View File

@@ -1090,7 +1090,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    indexDir = new File(TEMP_DIR, "IndexReaderReopen");
+    indexDir = _TestUtil.getTempDir("IndexReaderReopen");
   }

   public void testCloseOrig() throws Throwable {

View File

@@ -2756,7 +2756,7 @@ public class TestIndexWriter extends LuceneTestCase {
     // Tests that if FSDir is opened w/ a NoLockFactory (or SingleInstanceLF),
     // then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
    // when listAll() was called in IndexFileDeleter.
-    Directory dir = newFSDirectory(new File(TEMP_DIR, "emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
+    Directory dir = newFSDirectory(_TestUtil.getTempDir("emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
     new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
     dir.close();
   }

View File

@@ -21,6 +21,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
@@ -39,7 +40,7 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     if (this.__test_dir == null) {
-      this.__test_dir = new File(TEMP_DIR, "testIndexWriter");
+      this.__test_dir = _TestUtil.getTempDir("testIndexWriter");

       if (this.__test_dir.exists()) {
         throw new IOException("test directory \"" + this.__test_dir.getPath() + "\" already exists (please remove by hand)");

View File

@@ -41,7 +41,7 @@ public class TestIndexWriterOnJRECrash extends TestNRTThreads {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    tempDir = File.createTempFile("jrecrash", "tmp", TEMP_DIR);
+    tempDir = _TestUtil.getTempDir("jrecrash");
     tempDir.delete();
     tempDir.mkdir();
   }

View File

@@ -67,7 +67,7 @@ public class TestLongPostings extends LuceneTestCase {
     // Don't use _TestUtil.getTempDir so that we own the
     // randomness (ie same seed will point to same dir):
-    Directory dir = newFSDirectory(new File(LuceneTestCase.TEMP_DIR, "longpostings" + "." + random.nextLong()));
+    Directory dir = newFSDirectory(_TestUtil.getTempDir("longpostings" + "." + random.nextLong()));

     final int NUM_DOCS = (int) ((TEST_NIGHTLY ? 4e6 : (RANDOM_MULTIPLIER*2e4)) * (1+random.nextDouble()));

View File

@@ -240,7 +240,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
   }

   public void testSetBufferSize() throws IOException {
-    File indexDir = new File(TEMP_DIR, "testSetBufferSize");
+    File indexDir = _TestUtil.getTempDir("testSetBufferSize");
     MockFSDirectory dir = new MockFSDirectory(indexDir, random);
     try {
       IndexWriter writer = new IndexWriter(

View File

@@ -42,7 +42,7 @@ public class TestDirectory extends LuceneTestCase {
   // Test that different instances of FSDirectory can coexist on the same
   // path, can read, write, and lock files.
   public void testDirectInstantiation() throws Exception {
-    File path = new File(TEMP_DIR, "testDirectInstantiation");
+    File path = _TestUtil.getTempDir("testDirectInstantiation");

     int sz = 3;
     Directory[] dirs = new Directory[sz];
@@ -134,7 +134,7 @@ public class TestDirectory extends LuceneTestCase {
   // LUCENE-1468
   public void testFSDirectoryFilter() throws IOException {
-    checkDirectoryFilter(newFSDirectory(new File(TEMP_DIR,"test")));
+    checkDirectoryFilter(newFSDirectory(_TestUtil.getTempDir("test")));
   }

   // LUCENE-1468
@@ -151,7 +151,7 @@ public class TestDirectory extends LuceneTestCase {
   // LUCENE-1468
   public void testCopySubdir() throws Throwable {
-    File path = new File(TEMP_DIR, "testsubdir");
+    File path = _TestUtil.getTempDir("testsubdir");
     try {
       path.mkdirs();
       new File(path, "subdir").mkdirs();
@@ -164,7 +164,7 @@ public class TestDirectory extends LuceneTestCase {
   // LUCENE-1468
   public void testNotDirectory() throws Throwable {
-    File path = new File(TEMP_DIR, "testnotdir");
+    File path = _TestUtil.getTempDir("testnotdir");
     Directory fsDir = new SimpleFSDirectory(path, null);
     try {
       IndexOutput out = fsDir.createOutput("afile");

View File

@@ -41,7 +41,7 @@ public class TestMultiMMap extends LuceneTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    workDir = new File(TEMP_DIR, "TestMultiMMap");
+    workDir = _TestUtil.getTempDir("TestMultiMMap");
     workDir.mkdirs();
   }

View File

@@ -24,6 +24,7 @@ import java.io.ObjectOutputStream;
 import java.io.ByteArrayOutputStream;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -49,7 +50,7 @@ public class TestRAMDirectory extends LuceneTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    indexDir = new File(TEMP_DIR, "RAMDirIndex");
+    indexDir = _TestUtil.getTempDir("RAMDirIndex");

     Directory dir = newFSDirectory(indexDir);
     IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(

View File

@@ -20,6 +20,7 @@ package org.apache.lucene.store;
 import java.io.File;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -59,7 +60,7 @@ public class TestWindowsMMap extends LuceneTestCase {
   }

   private final static String storePathname =
-    new File(TEMP_DIR,"testLuceneMmap").getAbsolutePath();
+    _TestUtil.getTempDir("testLuceneMmap").getAbsolutePath();

   public void testMmapIndex() throws Exception {
     // sometimes the directory is not cleaned by rmDir, because on Windows it

View File

@@ -147,4 +147,14 @@ public class TestAttributeSource extends LuceneTestCase {
       fail("Should throw IllegalArgumentException");
     } catch (IllegalArgumentException iae) {}
   }
+
+  public void testLUCENE_3042() throws Exception {
+    final AttributeSource src1 = new AttributeSource();
+    src1.addAttribute(CharTermAttribute.class).append("foo");
+    int hash1 = src1.hashCode(); // this triggers a cached state
+    final AttributeSource src2 = new AttributeSource(src1);
+    src2.addAttribute(TypeAttribute.class).setType("bar");
+    assertTrue("The hashCode is identical, so the captured state was preserved.", hash1 != src1.hashCode());
+    assertEquals(src2.hashCode(), src1.hashCode());
+  }
 }

View File

@@ -33,7 +33,8 @@ public enum LicenseType {
   MPL("Mozilla Public License", false), //NOT SURE on the required notice
   PD("Public Domain", false),
   //SUNBCLA("Sun Binary Code License Agreement"),
-  SUN("Sun Open Source License", false)
+  SUN("Sun Open Source License", false),
+  FAKE("FAKE license - not needed", false)
   ;

   private String display;

View File

@@ -105,8 +105,10 @@ New Features
   levenshtein automata.  (rmuir)

 * SOLR-1873: SolrCloud - added shared/central config and core/shard managment via zookeeper,
-  built-in load balancing, and infrastructure for future SolrCloud work.
-  (yonik, Mark Miller)
+  built-in load balancing, and infrastructure for future SolrCloud work. (yonik, Mark Miller)
+  Additional Work:
+  SOLR-2324: SolrCloud solr.xml parameters are not persisted by CoreContainer.
+  (Massimo Schiavon, Mark Miller)

 * SOLR-1729: Evaluation of NOW for date math is done only once per request for
   consistency, and is also propagated to shards in distributed search.
@@ -129,6 +131,14 @@ New Features
 * SOLR-2335: New 'field("...")' function syntax for refering to complex
   field names (containing whitespace or special characters) in functions.

+* SOLR-1709: Distributed support for Date and Numeric Range Faceting
+  (Peter Sturge, David Smiley, hossman)
+
+* SOLR-2383: /browse improvements: generalize range and date facet display
+  (Jan Høydahl via yonik)

 Optimizations
 ----------------------
@@ -218,6 +228,8 @@ Other Changes
   and publish binary, javadoc, and source test-framework jars.
   (Drew Farris, Robert Muir, Steve Rowe)

+* SOLR-2461: QuerySenderListener and AbstractSolrEventListener are
+  now public (hossman)

 Documentation
 ----------------------
@@ -237,6 +249,10 @@ Carrot2 3.4.2
 Upgrading from Solr 3.1
 ----------------------

+* The updateRequestProcessorChain for a RequestHandler is now defined
+  with update.chain rather than update.processor.  The latter still works,
+  but has been deprecated.

 Detailed Change List
 ----------------------
@@ -256,9 +272,42 @@ Bug Fixes
 * SOLR-2455: Prevent double submit of forms in admin interface.
   (Jeffrey Chang via uschindler)

+* SOLR-2464: Fix potential slowness in QueryValueSource (the query() function) when
+  the query is very sparse and may not match any documents in a segment. (yonik)
+
+* SOLR-2469: When using java replication with replicateAfter=startup, the first
+  commit point on server startup is never removed. (yonik)
+
+* SOLR-2466: SolrJ's CommonsHttpSolrServer would retry requests on failure, regardless
+  of the configured maxRetries, due to HttpClient having it's own retry mechanism
+  by default.  The retryCount of HttpClient is now set to 0, and SolrJ does
+  the retry. (yonik)
+
+* SOLR-2409: edismax parser - treat the text of a fielded query as a literal if the
+  fieldname does not exist.  For example Mission: Impossible should not search on
+  the "Mission" field unless it's a valid field in the schema. (Ryan McKinley, yonik)
+
+* SOLR-2403: facet.sort=index reported incorrect results for distributed search
+  in a number of scenarios when facet.mincount>0.  This patch also adds some
+  performance/algorithmic improvements when (facet.sort=count && facet.mincount=1
+  && facet.limit=-1) and when (facet.sort=index && facet.mincount>0) (yonik)
+
+* SOLR-2333: The "rename" core admin action does not persist the new name to solr.xml
+  (Rasmus Hahn, Paul R. Brown via Mark Miller)
+
+* SOLR-2390: Performance of usePhraseHighlighter is terrible on very large Documents,
+  regardless of hl.maxDocCharsToAnalyze. (Mark Miller)
+
+* SOLR-2474: The helper TokenStreams in analysis.jsp and AnalysisRequestHandlerBase
+  did not clear all attributes so they displayed incorrect attribute values for tokens
+  in later filter stages. (uschindler, rmuir, yonik)

 Other Changes
 ----------------------

+* SOLR-2105: Rename RequestHandler param 'update.processor' to 'update.chain'.
+  (Jan Høydahl via Mark Miller)

 Build
 ----------------------

View File

@@ -120,7 +120,7 @@
   <!-- Compile the project. -->
   <target name="compile"
           description="Compile the source code."
-          depends="validate-solr, compile-solrj">
+          depends="compile-solrj">

     <solr-javac destdir="${dest}/solr"
                 classpathref="compile.classpath.solrj">
@@ -394,7 +394,7 @@
   <!-- Run contrib unit tests. -->
   <target name="test"
           description="Runs the core unit tests."
-          depends="test-core, test-contrib, test-jsp" />
+          depends="validate-solr, test-core, test-contrib, test-jsp" />

   <target name="junit" depends="compileTests,junit-mkdir,junit-sequential,junit-parallel"/>

View File

@@ -194,7 +194,7 @@ public class DataImportHandler extends RequestHandlerBase implements
         IMPORT_CMD.equals(command)) {

       UpdateRequestProcessorChain processorChain =
-        req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_PROCESSOR));
+        req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_CHAIN));
       UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);
       SolrResourceLoader loader = req.getCore().getResourceLoader();
       SolrWriter sw = getSolrWriter(processor, loader, requestParams, req);

View File

@@ -108,7 +108,7 @@ public class UIMAUpdateRequestProcessorTest extends SolrTestCaseJ4 {
   private void addDoc(String doc) throws Exception {
     Map<String, String[]> params = new HashMap<String, String[]>();
-    params.put(UpdateParams.UPDATE_PROCESSOR, new String[] { "uima" });
+    params.put(UpdateParams.UPDATE_CHAIN, new String[] { "uima" });
     MultiMapSolrParams mmparams = new MultiMapSolrParams(params);
     SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), (SolrParams) mmparams) {
     };

View File

@@ -855,7 +855,7 @@
     -->
   <requestHandler name="/update" class="solr.XmlUpdateRequestHandler">
     <lst name="defaults">
-      <str name="update.processor">uima</str>
+      <str name="update.chain">uima</str>
     </lst>
   </requestHandler>
@@ -997,7 +997,7 @@
        anyway. You have to link the chain to an update handler above to use
        it ie: <requestHandler name="/update
        "class="solr.XmlUpdateRequestHandler"> <lst name="defaults"> <str
-       name="update.processor">dedupe</str> </lst> </requestHandler>
+       name="update.chain">dedupe</str> </lst> </requestHandler>
     -->
   <updateRequestProcessorChain name="uima">

View File

@@ -774,6 +774,7 @@
        <str name="q.alt">*:*</str>
        <str name="rows">10</str>
        <str name="fl">*,score</str>
+
        <str name="mlt.qf">
          text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
        </str>
@@ -792,6 +793,8 @@
        <int name="facet.range.start">0</int>
        <int name="facet.range.end">600</int>
        <int name="facet.range.gap">50</int>
+       <str name="facet.range">popularity</str>
+       <int name="f.popularity.facet.range.gap">3</int>
        <str name="facet.range.other">after</str>
        <str name="facet.date">manufacturedate_dt</str>
        <str name="facet.date.start">NOW/YEAR-10YEARS</str>
@@ -841,7 +844,7 @@
     -->
   <!--
    <lst name="defaults">
-     <str name="update.processor">dedupe</str>
+     <str name="update.chain">dedupe</str>
    </lst>
   -->
   </requestHandler>

View File

@@ -88,13 +88,13 @@
   <ul>
   #foreach ($facet in $field)
     #set($theDate = $date.toDate("yyyy-MM-dd'T'HH:mm:ss'Z'", $facet.key))
-    #set($value = '["' + $facet.key + '" TO "' + $facet.key + $gap + '"]')
+    #set($value = '["' + $facet.key + '" TO "' + $facet.key + $gap + '"}')
     #set($facetURL = "#url_for_facet_date_filter($fieldName, $value)")
     #if ($facetURL != '')
       #if ($facet.key != "gap" && $facet.key != "start" && $facet.key != "end" && $facet.key != "before" && $facet.key != "after")
-        <li><a href="$facetURL">$date.format('MMM yyyy', $theDate)</a> ($facet.value)</li>
+        <li><a href="$facetURL">$date.format('MMM yyyy', $theDate) $gap</a> ($facet.value)</li>
      #end
      #if ($facet.key == "before" && $facet.value > 0)
        <li><a href="$facetURL">Before</a> ($facet.value)</li>
@@ -113,20 +113,20 @@
   <span class="facet-field">$display</span>
   <ul>
   #if($before && $before != "")
-    #set($value = "[* TO " + $start + "]")
+    #set($value = "[* TO " + $start + "}")
     #set($facetURL = "#url_for_facet_range_filter($fieldName, $value)")
     <li><a href="$facetURL">Less than $start</a> ($before)</li>
   #end
   #foreach ($facet in $field)
     #set($rangeEnd = $math.add($facet.key, $gap))
-    #set($value = "[" + $facet.key + " TO " + $rangeEnd + "]")
+    #set($value = "[" + $facet.key + " TO " + $rangeEnd + "}")
     #set($facetURL = "#url_for_facet_range_filter($fieldName, $value)")
     #if ($facetURL != '')
-      <li><a href="$facetURL">$facet.key</a> ($facet.value)</li>
+      <li><a href="$facetURL">$facet.key - $rangeEnd</a> ($facet.value)</li>
    #end
   #end
-  #if($end && $end != "")
-    #set($value = "[" + $end + " TO *]")
+  #if($end && $end != "" && $after > 0)
+    #set($value = "[" + $end + " TO *}")
     #set($facetURL = "#url_for_facet_range_filter($fieldName, $value)")
     <li><a href="$facetURL">More than $math.toNumber($end)</a> ($after)</li>
   #end
@@ -180,4 +180,8 @@
       $v
     #end
   #end
 #end
+
+#macro(capitalize $name)
+${name.substring(0,1).toUpperCase()}${name.substring(1)}
+#end

View File

@@ -1,5 +1,9 @@
 ##TODO: Generically deal with all dates
 <h2 #annTitle("Facets generated by adding &facet.date= to the request")>Date Facets</h2>
-#set($field = $response.response.facet_counts.facet_dates.manufacturedate_dt)
-#set($gap = $response.response.facet_counts.facet_dates.manufacturedate_dt.gap)
-#display_facet_date($field, "Manufacture Date", "manufacturedate_dt", $gap)
+#foreach ($field in $response.response.facet_counts.facet_dates)
+  #set($name = $field.key)
+  #set($display = "#capitalize($name)")
+  #set($f = $field.value)
+  #set($gap = $field.value.gap)
+  #display_facet_date($f, $display, $name, $gap)
+#end

View File

@@ -1,10 +1,12 @@
 <h2 #annTitle("Facets generated by adding &facet.range= to the request")>Range Facets</h2>
-#set($field = $response.response.facet_counts.facet_ranges.price.counts)
-#set($start = $response.response.facet_counts.facet_ranges.price.start)
-#set($end = $response.response.facet_counts.facet_ranges.price.end)
-#set($gap = $response.response.facet_counts.facet_ranges.price.gap)
-#set($before = $response.response.facet_counts.facet_ranges.price.before)
-#set($after = $response.response.facet_counts.facet_ranges.price.after)
-##TODO: Make this display the "range", not just the lower value
-##TODO: Have a generic way to deal with ranges
-#display_facet_range($field, "Price (in $)", "price", $start, $end, $gap, $before, $after)
+#foreach ($field in $response.response.facet_counts.facet_ranges)
+  #set($name = $field.key)
+  #set($display = "#capitalize($name)")
+  #set($f = $field.value.counts)
+  #set($start = $field.value.start)
+  #set($end = $field.value.end)
+  #set($gap = $field.value.gap)
+  #set($before = $field.value.before)
+  #set($after = $field.value.after)
+  #display_facet_range($f, $display, $name, $start, $end, $gap, $before, $after)
+#end

View File

@ -18,6 +18,18 @@
margin-left: 20px; margin-left: 20px;
} }
.parsed_query_header {
font-family: Helvetica, Arial, sans-serif;
font-size: 10pt;
font-weight: bold;
}
.parsed_query {
font-family: Courier, Courier New, monospaced;
font-size: 10pt;
font-weight: normal;
}
body { body {
font-family: Helvetica, Arial, sans-serif; font-family: Helvetica, Arial, sans-serif;
font-size: 10pt; font-size: 10pt;

View File

@ -27,14 +27,16 @@
#end #end
#end #end
</div> </div>
<div class="parsed_query_header">
#if($request.params.get('debugQuery')) #if($request.params.get('debugQuery'))
<a href="#" onclick='jQuery(this).siblings("pre").toggle(); return false;'>toggle parsed query</a> <a href="#" onclick='jQuery(this).siblings("div").toggle(); return false;'>toggle parsed query</a>
<pre style="display:none">$response.response.debug.parsedquery</pre> <div class="parsed_query" style="display:none">$response.response.debug.parsedquery</div>
#end #end
#set($queryOpts = $request.params.get("queryOpts")) #set($queryOpts = $request.params.get("queryOpts"))
#if($queryOpts && $queryOpts != "") #if($queryOpts && $queryOpts != "")
<input type="hidden" name="queryOpts" value="$queryOpts"/> <input type="hidden" name="queryOpts" value="$queryOpts"/>
#end #end
</div>
</form> </form>
</div> </div>

View File

@ -61,7 +61,7 @@ public class SolrDocumentList extends ArrayList<SolrDocument>
public String toString() { public String toString() {
return "{numFound="+numFound return "{numFound="+numFound
+",start="+start +",start="+start
+ (maxScore!=null ? ""+maxScore : "") + (maxScore!=null ? ",maxScore="+maxScore : "")
+",docs="+super.toString() +",docs="+super.toString()
+"}"; +"}";
} }

View File

@ -43,8 +43,11 @@ public interface UpdateParams
/** Rollback update commands */ /** Rollback update commands */
public static String ROLLBACK = "rollback"; public static String ROLLBACK = "rollback";
/** Select the update processor to use. A RequestHandler may or may not respect this parameter */ /** Select the update processor chain to use. A RequestHandler may or may not respect this parameter */
public static final String UPDATE_PROCESSOR = "update.processor"; public static final String UPDATE_CHAIN = "update.chain";
// TODO: Include the old deprecated param, for removal in Solr 4.0
public static final String UPDATE_CHAIN_DEPRECATED = "update.processor";
/** /**
* If optimizing, set the maximum number of segments left in the index after optimization. 1 is the default (and is equivalent to calling IndexWriter.optimize() in Lucene). * If optimizing, set the maximum number of segments left in the index after optimization. 1 is the default (and is equivalent to calling IndexWriter.optimize() in Lucene).
*/ */
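The rename above keeps the old "update.processor" name alive for one more release while steering clients to "update.chain". A hedged sketch of selecting a chain with the new parameter name; "mychain" is a hypothetical chain id from solrconfig.xml:

    import org.apache.solr.common.params.ModifiableSolrParams;
    import org.apache.solr.common.params.UpdateParams;

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set(UpdateParams.UPDATE_CHAIN, "mychain");  // "update.chain"
    // UPDATE_CHAIN_DEPRECATED ("update.processor") still works but logs a
    // warning; see ContentStreamHandlerBase further down in this commit.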

View File

@ -23,7 +23,7 @@ import org.apache.solr.search.SolrIndexSearcher;
/** /**
*/ */
class AbstractSolrEventListener implements SolrEventListener { public class AbstractSolrEventListener implements SolrEventListener {
protected final SolrCore core; protected final SolrCore core;
public AbstractSolrEventListener(SolrCore core) { public AbstractSolrEventListener(SolrCore core) {
this.core = core; this.core = core;

View File

@ -79,6 +79,7 @@ public class CoreContainer
protected Map<String ,IndexSchema> indexSchemaCache; protected Map<String ,IndexSchema> indexSchemaCache;
protected String adminHandler; protected String adminHandler;
protected boolean shareSchema; protected boolean shareSchema;
protected Integer zkClientTimeout;
protected String solrHome; protected String solrHome;
protected String defaultCoreName = ""; protected String defaultCoreName = "";
private ZkController zkController; private ZkController zkController;
@ -313,7 +314,7 @@ public class CoreContainer
zkHost = cfg.get("solr/@zkHost" , null); zkHost = cfg.get("solr/@zkHost" , null);
adminPath = cfg.get("solr/cores/@adminPath", null); adminPath = cfg.get("solr/cores/@adminPath", null);
shareSchema = cfg.getBool("solr/cores/@shareSchema", false); shareSchema = cfg.getBool("solr/cores/@shareSchema", false);
int zkClientTimeout = cfg.getInt("solr/cores/@zkClientTimeout", 10000); zkClientTimeout = cfg.getInt("solr/cores/@zkClientTimeout", 10000);
hostPort = System.getProperty("hostPort"); hostPort = System.getProperty("hostPort");
if (hostPort == null) { if (hostPort == null) {
@ -500,7 +501,12 @@ public class CoreContainer
SolrCore old = null; SolrCore old = null;
synchronized (cores) { synchronized (cores) {
old = cores.put(name, core); old = cores.put(name, core);
/*
* set both the name of the descriptor and the name of the
* core, since the descriptor's name is used for persisting.
*/
core.setName(name); core.setName(name);
core.getCoreDescriptor().name = name;
} }
if (zkController != null) { if (zkController != null) {
@ -884,6 +890,7 @@ public class CoreContainer
if (this.libDir != null) { if (this.libDir != null) {
writeAttribute(w,"sharedLib",libDir); writeAttribute(w,"sharedLib",libDir);
} }
if(zkHost != null) writeAttribute(w, "zkHost", zkHost);
writeAttribute(w,"persistent",isPersistent()); writeAttribute(w,"persistent",isPersistent());
w.write(">\n"); w.write(">\n");
@ -892,9 +899,13 @@ public class CoreContainer
} }
w.write(" <cores"); w.write(" <cores");
writeAttribute(w, "adminPath",adminPath); writeAttribute(w, "adminPath",adminPath);
if(adminHandler != null) writeAttribute(w, "adminHandler",adminHandler); if(adminHandler != null) writeAttribute(w, "adminHandler", adminHandler);
if(shareSchema) writeAttribute(w, "shareSchema","true"); if(shareSchema) writeAttribute(w, "shareSchema", "true");
if(!defaultCoreName.equals("")) writeAttribute(w, "defaultCoreName",defaultCoreName); if(!defaultCoreName.equals("")) writeAttribute(w, "defaultCoreName", defaultCoreName);
if(host != null) writeAttribute(w, "host", host);
if(hostPort != null) writeAttribute(w, "hostPort", hostPort);
if(zkClientTimeout != null) writeAttribute(w, "zkClientTimeout", zkClientTimeout);
if(hostContext != null) writeAttribute(w, "hostContext", hostContext);
w.write(">\n"); w.write(">\n");
synchronized(cores) { synchronized(cores) {

View File

@ -32,7 +32,7 @@ import java.util.List;
/** /**
* @version $Id$ * @version $Id$
*/ */
class QuerySenderListener extends AbstractSolrEventListener { public class QuerySenderListener extends AbstractSolrEventListener {
public QuerySenderListener(SolrCore core) { public QuerySenderListener(SolrCore core) {
super(core); super(core);
} }

View File

@ -337,6 +337,7 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {
@Override @Override
public boolean incrementToken() throws IOException { public boolean incrementToken() throws IOException {
if (tokenIterator.hasNext()) { if (tokenIterator.hasNext()) {
clearAttributes();
AttributeSource next = tokenIterator.next(); AttributeSource next = tokenIterator.next();
Iterator<Class<? extends Attribute>> atts = next.getAttributeClassesIterator(); Iterator<Class<? extends Attribute>> atts = next.getAttributeClassesIterator();
while (atts.hasNext()) // make sure all att impls in the token exist here while (atts.hasNext()) // make sure all att impls in the token exist here
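The added clearAttributes() call enforces the TokenStream contract: incrementToken() must reset all per-token attributes before installing the next token's state, otherwise values set by a previous token (payloads, flags, position increments) linger on tokens that never set them. The canonical shape, as a sketch; tokens is a stand-in Iterator<String>, where the patched class actually copies whole AttributeSource states:

    @Override
    public boolean incrementToken() throws IOException {
      if (!tokens.hasNext()) return false;
      clearAttributes();                        // wipe state left by the previous token
      termAtt.setEmpty().append(tokens.next()); // termAtt: a CharTermAttribute from addAttribute(...)
      return true;
    }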

View File

@ -24,6 +24,8 @@ import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.update.processor.UpdateRequestProcessor; import org.apache.solr.update.processor.UpdateRequestProcessor;
import org.apache.solr.update.processor.UpdateRequestProcessorChain; import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
@ -31,12 +33,22 @@ import org.apache.solr.update.processor.UpdateRequestProcessorChain;
* *
**/ **/
public abstract class ContentStreamHandlerBase extends RequestHandlerBase { public abstract class ContentStreamHandlerBase extends RequestHandlerBase {
public static Logger log = LoggerFactory.getLogger(ContentStreamHandlerBase.class);
@Override @Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
SolrParams params = req.getParams(); SolrParams params = req.getParams();
String updateChainName = null;
if(params.get(UpdateParams.UPDATE_CHAIN_DEPRECATED) != null) {
log.warn("Use of deprecated update request parameter "+UpdateParams.UPDATE_CHAIN_DEPRECATED+
" detected. Please use the new parameter "+UpdateParams.UPDATE_CHAIN+" instead, as support"+
" for "+UpdateParams.UPDATE_CHAIN_DEPRECATED+" will be removed in a later version.");
updateChainName = params.get(UpdateParams.UPDATE_CHAIN_DEPRECATED);
} else {
updateChainName = params.get(UpdateParams.UPDATE_CHAIN);
}
UpdateRequestProcessorChain processorChain = UpdateRequestProcessorChain processorChain =
req.getCore().getUpdateProcessingChain(params.get(UpdateParams.UPDATE_PROCESSOR)); req.getCore().getUpdateProcessingChain(updateChainName);
UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp); UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);

View File

@ -843,9 +843,13 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
indexCommitPoint = reader.getIndexCommit(); indexCommitPoint = reader.getIndexCommit();
} }
} finally { } finally {
// We don't need to save commit points for replication, the SolrDeletionPolicy
// always saves the last commit point (and the last optimized commit point, if needed)
/***
if(indexCommitPoint != null){ if(indexCommitPoint != null){
core.getDeletionPolicy().saveCommitPoint(indexCommitPoint.getVersion()); core.getDeletionPolicy().saveCommitPoint(indexCommitPoint.getVersion());
} }
***/
} }
} }
if (core.getUpdateHandler() instanceof DirectUpdateHandler2) { if (core.getUpdateHandler() instanceof DirectUpdateHandler2) {

View File

@ -190,7 +190,7 @@ public class CoreAdminHandler extends RequestHandlerBase {
} }
UpdateRequestProcessorChain processorChain = UpdateRequestProcessorChain processorChain =
core.getUpdateProcessingChain(params.get(UpdateParams.UPDATE_PROCESSOR)); core.getUpdateProcessingChain(params.get(UpdateParams.UPDATE_CHAIN));
wrappedReq = new LocalSolrQueryRequest(core, req.getParams()); wrappedReq = new LocalSolrQueryRequest(core, req.getParams());
UpdateRequestProcessor processor = UpdateRequestProcessor processor =
processorChain.createProcessor(wrappedReq, rsp); processorChain.createProcessor(wrappedReq, rsp);

View File

@ -17,23 +17,23 @@
package org.apache.solr.handler.component; package org.apache.solr.handler.component;
import java.io.IOException; import org.apache.lucene.queryParser.ParseException;
import java.net.URL; import org.apache.lucene.util.OpenBitSet;
import java.util.*; import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.FacetParams; import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils; import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.request.SimpleFacets; import org.apache.solr.request.SimpleFacets;
import org.apache.lucene.util.OpenBitSet;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.schema.FieldType; import org.apache.solr.schema.FieldType;
import org.apache.lucene.queryParser.ParseException; import org.apache.solr.search.QueryParsing;
import java.io.IOException;
import java.net.URL;
import java.util.*;
/** /**
* TODO! * TODO!
@ -222,11 +222,37 @@ public class FacetComponent extends SearchComponent
sreq.params.remove(paramStart + FacetParams.FACET_MINCOUNT); sreq.params.remove(paramStart + FacetParams.FACET_MINCOUNT);
sreq.params.remove(paramStart + FacetParams.FACET_OFFSET); sreq.params.remove(paramStart + FacetParams.FACET_OFFSET);
dff.initialLimit = dff.offset + dff.limit; dff.initialLimit = dff.limit <= 0 ? dff.limit : dff.offset + dff.limit;
if(dff.sort.equals(FacetParams.FACET_SORT_COUNT) && dff.limit > 0) { if (dff.sort.equals(FacetParams.FACET_SORT_COUNT)) {
// set the initial limit higher to increase accuracy if (dff.limit > 0) {
dff.initialLimit = (int)(dff.initialLimit * 1.5) + 10; // set the initial limit higher to increase accuracy
dff.initialLimit = (int)(dff.initialLimit * 1.5) + 10;
dff.initialMincount = 0; // TODO: we could change this to 1, but would then need more refinement for small facet result sets?
} else {
// if limit==-1, then no need to artificially lower mincount to 0 if it's 1
dff.initialMincount = Math.min(dff.minCount, 1);
}
} else {
// we're sorting by index order.
// if minCount==0, we should always be able to get accurate results w/o over-requesting or refining
// if minCount==1, we should be able to get accurate results w/o over-requesting, but we'll need to refine
// if minCount==n (>1), we can set the initialMincount to minCount/nShards, rounded up.
// For example, we know that if minCount=10 and we have 3 shards, then at least one shard must have a count of 4 for the term
// For the minCount>1 case, we can generate too short of a list (miss terms at the end of the list) unless limit==-1
// For example: each shard could produce a list of top 10, but some of those could fail to make it into the combined list (i.e.
// we needed to go beyond the top 10 to generate the top 10 combined). Overrequesting can help a little here, but not as
// much as when sorting by count.
if (dff.minCount <= 1) {
dff.initialMincount = dff.minCount;
} else {
dff.initialMincount = (int)Math.ceil((double)dff.minCount / rb.slices.length);
// dff.initialMincount = 1;
}
}
if (dff.initialMincount != 0) {
sreq.params.set(paramStart + FacetParams.FACET_MINCOUNT, dff.initialMincount);
} }
// Currently this is for testing only and allows overriding of the // Currently this is for testing only and allows overriding of the
@ -286,8 +312,95 @@ public class FacetComponent extends SearchComponent
dff.add(shardNum, (NamedList)facet_fields.get(dff.getKey()), dff.initialLimit); dff.add(shardNum, (NamedList)facet_fields.get(dff.getKey()), dff.initialLimit);
} }
} }
}
// Distributed facet_dates
//
// The implementation below uses the first encountered shard's
// facet_dates as the basis for subsequent shards' data to be merged.
// (the "NOW" param should ensure consistency)
@SuppressWarnings("unchecked")
SimpleOrderedMap<SimpleOrderedMap<Object>> facet_dates =
(SimpleOrderedMap<SimpleOrderedMap<Object>>)
facet_counts.get("facet_dates");
if (facet_dates != null) {
// go through each facet_date
for (Map.Entry<String,SimpleOrderedMap<Object>> entry : facet_dates) {
final String field = entry.getKey();
if (fi.dateFacets.get(field) == null) {
// first time we've seen this field, no merging
fi.dateFacets.add(field, entry.getValue());
} else {
// not the first time, merge current field
SimpleOrderedMap<Object> shardFieldValues
= entry.getValue();
SimpleOrderedMap<Object> existFieldValues
= fi.dateFacets.get(field);
for (Map.Entry<String,Object> existPair : existFieldValues) {
final String key = existPair.getKey();
if (key.equals("gap") ||
key.equals("end") ||
key.equals("start")) {
// we can skip these, must all be the same across shards
continue;
}
// can be null if inconsistencies in shards responses
Integer newValue = (Integer) shardFieldValues.get(key);
if (null != newValue) {
Integer oldValue = ((Integer) existPair.getValue());
existPair.setValue(oldValue + newValue);
}
}
}
}
}
// Distributed facet_ranges
//
// The implementation below uses the first encountered shard's
// facet_ranges as the basis for subsequent shards' data to be merged.
@SuppressWarnings("unchecked")
SimpleOrderedMap<SimpleOrderedMap<Object>> facet_ranges =
(SimpleOrderedMap<SimpleOrderedMap<Object>>)
facet_counts.get("facet_ranges");
if (facet_ranges != null) {
// go through each facet_range
for (Map.Entry<String,SimpleOrderedMap<Object>> entry : facet_ranges) {
final String field = entry.getKey();
if (fi.rangeFacets.get(field) == null) {
// first time we've seen this field, no merging
fi.rangeFacets.add(field, entry.getValue());
} else {
// not the first time, merge current field counts
@SuppressWarnings("unchecked")
NamedList<Integer> shardFieldValues
= (NamedList<Integer>) entry.getValue().get("counts");
@SuppressWarnings("unchecked")
NamedList<Integer> existFieldValues
= (NamedList<Integer>) fi.rangeFacets.get(field).get("counts");
for (Map.Entry<String,Integer> existPair : existFieldValues) {
final String key = existPair.getKey();
// can be null if inconsistencies in shards responses
Integer newValue = shardFieldValues.get(key);
if (null != newValue) {
Integer oldValue = existPair.getValue();
existPair.setValue(oldValue + newValue);
}
}
}
}
}
}
// //
// This code currently assumes that there will be only a single // This code currently assumes that there will be only a single
@ -296,15 +409,18 @@ public class FacetComponent extends SearchComponent
// //
for (DistribFieldFacet dff : fi.facets.values()) { for (DistribFieldFacet dff : fi.facets.values()) {
if (dff.limit <= 0) continue; // no need to check these facets for refinement // no need to check these facets for refinement
if (dff.minCount <= 1 && dff.sort.equals(FacetParams.FACET_SORT_INDEX)) continue; if (dff.initialLimit <= 0 && dff.initialMincount == 0) continue;
@SuppressWarnings("unchecked") // generic array's are anoying // only other case where index-sort doesn't need refinement is if minCount==0
if (dff.minCount == 0 && dff.sort.equals(FacetParams.FACET_SORT_INDEX)) continue;
@SuppressWarnings("unchecked") // generic array's are annoying
List<String>[] tmp = (List<String>[]) new List[rb.shards.length]; List<String>[] tmp = (List<String>[]) new List[rb.shards.length];
dff._toRefine = tmp; dff._toRefine = tmp;
ShardFacetCount[] counts = dff.getCountSorted(); ShardFacetCount[] counts = dff.getCountSorted();
int ntop = Math.min(counts.length, dff.offset + dff.limit); int ntop = Math.min(counts.length, dff.limit >= 0 ? dff.offset + dff.limit : Integer.MAX_VALUE);
long smallestCount = counts.length == 0 ? 0 : counts[ntop-1].count; long smallestCount = counts.length == 0 ? 0 : counts[ntop-1].count;
for (int i=0; i<counts.length; i++) { for (int i=0; i<counts.length; i++) {
@ -313,8 +429,11 @@ public class FacetComponent extends SearchComponent
if (i<ntop) { if (i<ntop) {
// automatically flag the top values for refinement // automatically flag the top values for refinement
// this should always be true for facet.sort=index
needRefinement = true; needRefinement = true;
} else { } else {
// this logic should only be invoked for facet.sort=index (for now)
// calculate the maximum value that this term may have // calculate the maximum value that this term may have
// and if it is >= smallestCount, then flag for refinement // and if it is >= smallestCount, then flag for refinement
long maxCount = sfc.count; long maxCount = sfc.count;
@ -422,13 +541,32 @@ public class FacetComponent extends SearchComponent
counts = dff.getLexSorted(); counts = dff.getLexSorted();
} }
int end = dff.limit < 0 ? counts.length : Math.min(dff.offset + dff.limit, counts.length); if (countSorted) {
for (int i=dff.offset; i<end; i++) { int end = dff.limit < 0 ? counts.length : Math.min(dff.offset + dff.limit, counts.length);
if (counts[i].count < dff.minCount) { for (int i=dff.offset; i<end; i++) {
if (countSorted) break; // if sorted by count, we can break out of loop early if (counts[i].count < dff.minCount) {
else continue; break;
}
fieldCounts.add(counts[i].name, num(counts[i].count));
}
} else {
int off = dff.offset;
int lim = dff.limit >= 0 ? dff.limit : Integer.MAX_VALUE;
// index order...
for (int i=0; i<counts.length; i++) {
long count = counts[i].count;
if (count < dff.minCount) continue;
if (off > 0) {
off--;
continue;
}
if (lim <= 0) {
break;
}
lim--;
fieldCounts.add(counts[i].name, num(count));
} }
fieldCounts.add(counts[i].name, num(counts[i].count));
} }
if (dff.missing) { if (dff.missing) {
@ -436,9 +574,8 @@ public class FacetComponent extends SearchComponent
} }
} }
// TODO: facet dates & numbers facet_counts.add("facet_dates", fi.dateFacets);
facet_counts.add("facet_dates", new SimpleOrderedMap()); facet_counts.add("facet_ranges", fi.rangeFacets);
facet_counts.add("facet_ranges", new SimpleOrderedMap());
rb.rsp.add("facet_counts", facet_counts); rb.rsp.add("facet_counts", facet_counts);
@ -490,8 +627,14 @@ public class FacetComponent extends SearchComponent
* <b>This API is experimental and subject to change</b> * <b>This API is experimental and subject to change</b>
*/ */
public static class FacetInfo { public static class FacetInfo {
public LinkedHashMap<String,QueryFacet> queryFacets; public LinkedHashMap<String,QueryFacet> queryFacets;
public LinkedHashMap<String,DistribFieldFacet> facets; public LinkedHashMap<String,DistribFieldFacet> facets;
public SimpleOrderedMap<SimpleOrderedMap<Object>> dateFacets
= new SimpleOrderedMap<SimpleOrderedMap<Object>>();
public SimpleOrderedMap<SimpleOrderedMap<Object>> rangeFacets
= new SimpleOrderedMap<SimpleOrderedMap<Object>>();
public List<String> exceptionList; public List<String> exceptionList;
void parse(SolrParams params, ResponseBuilder rb) { void parse(SolrParams params, ResponseBuilder rb) {
@ -631,7 +774,8 @@ public class FacetComponent extends SearchComponent
public HashMap<String,ShardFacetCount> counts = new HashMap<String,ShardFacetCount>(128); public HashMap<String,ShardFacetCount> counts = new HashMap<String,ShardFacetCount>(128);
public int termNum; public int termNum;
public int initialLimit; // how many terms requested in first phase public int initialLimit; // how many terms requested in first phase
public int initialMincount; // mincount param sent to each shard
public boolean needRefinements; public boolean needRefinements;
public ShardFacetCount[] countSorted; public ShardFacetCount[] countSorted;
@ -671,11 +815,10 @@ public class FacetComponent extends SearchComponent
} }
} }
// the largest possible missing term is 0 if we received less // the largest possible missing term is initialMincount if we received less
// than the number requested (provided mincount==0 like it should be for // than the number requested.
// a shard request)
if (numRequested<0 || numRequested != 0 && numReceived < numRequested) { if (numRequested<0 || numRequested != 0 && numReceived < numRequested) {
last = 0; last = initialMincount;
} }
missingMaxPossible += last; missingMaxPossible += last;
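The per-shard mincount logic above rests on the pigeonhole bound spelled out in its comments: a term whose combined count reaches minCount must contribute at least ceil(minCount / nShards) on some shard, so each shard can safely prune below that without losing a qualifying term. Worked out for the example in the comment:

    // minCount = 10 across 3 shards: if every shard held at most 3
    // occurrences, the total could be at most 9 < 10, so at least one
    // shard must hold >= ceil(10/3) == 4.
    int minCount = 10, nShards = 3;
    int initialMincount = (int) Math.ceil((double) minCount / nShards); // == 4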

View File

@ -435,12 +435,20 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
// fall back to analyzer // fall back to analyzer
tstream = createAnalyzerTStream(schema, fieldName, docTexts[j]); tstream = createAnalyzerTStream(schema, fieldName, docTexts[j]);
} }
int maxCharsToAnalyze = params.getFieldInt(fieldName,
HighlightParams.MAX_CHARS,
Highlighter.DEFAULT_MAX_CHARS_TO_ANALYZE);
Highlighter highlighter; Highlighter highlighter;
if (Boolean.valueOf(req.getParams().get(HighlightParams.USE_PHRASE_HIGHLIGHTER, "true"))) { if (Boolean.valueOf(req.getParams().get(HighlightParams.USE_PHRASE_HIGHLIGHTER, "true"))) {
// TODO: this is not always necessary - eventually we would like to avoid this wrap // TODO: this is not always necessary - eventually we would like to avoid this wrap
// when it is not needed. // when it is not needed.
tstream = new CachingTokenFilter(tstream); if (maxCharsToAnalyze < 0) {
tstream = new CachingTokenFilter(tstream);
} else {
tstream = new CachingTokenFilter(new OffsetLimitTokenFilter(tstream, maxCharsToAnalyze));
}
// get highlighter // get highlighter
highlighter = getPhraseHighlighter(query, fieldName, req, (CachingTokenFilter) tstream); highlighter = getPhraseHighlighter(query, fieldName, req, (CachingTokenFilter) tstream);
@ -453,9 +461,6 @@ public class DefaultSolrHighlighter extends SolrHighlighter implements PluginInf
highlighter = getHighlighter(query, fieldName, req); highlighter = getHighlighter(query, fieldName, req);
} }
int maxCharsToAnalyze = params.getFieldInt(fieldName,
HighlightParams.MAX_CHARS,
Highlighter.DEFAULT_MAX_CHARS_TO_ANALYZE);
if (maxCharsToAnalyze < 0) { if (maxCharsToAnalyze < 0) {
highlighter.setMaxDocCharsToAnalyze(docTexts[j].length()); highlighter.setMaxDocCharsToAnalyze(docTexts[j].length());
} else { } else {
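The rearrangement above is about wrap order: hl.maxAnalyzedChars has to cap the stream before CachingTokenFilter buffers it, or the cache would already hold the full token sequence and the limit would only bite on the second pass. A sketch of the wrap under that assumption:

    import org.apache.lucene.analysis.CachingTokenFilter;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.search.highlight.OffsetLimitTokenFilter;

    static TokenStream limited(TokenStream ts, int maxCharsToAnalyze) {
      // truncate by offset first, then cache the already-limited tokens
      return new CachingTokenFilter(new OffsetLimitTokenFilter(ts, maxCharsToAnalyze));
    }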

View File

@ -316,7 +316,8 @@ class JSONWriter extends TextResponseWriter {
if( idx > 0 ) { if( idx > 0 ) {
writeArraySeparator(); writeArraySeparator();
} }
indent();
writeMapOpener(doc.size()); writeMapOpener(doc.size());
incLevel(); incLevel();

View File

@ -90,9 +90,10 @@ public class ValueSourceAugmenter extends DocTransformer
} }
int localId = docid - rcontext.docBase; int localId = docid - rcontext.docBase;
float val = values.floatVal(localId); // TODO: handle all types -- see: SOLR-2443 Object val = values.objectVal(localId);
if (val != null) {
doc.setField( name, val ); doc.setField( name, val );
}
} catch (IOException e) { } catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "exception at docid " + docid + " for valuesource " + valueSource, e, false); throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "exception at docid " + docid + " for valuesource " + valueSource, e, false);
} }

View File

@ -486,6 +486,17 @@ class DateFieldSource extends FieldCacheSource {
} }
} }
@Override
public Object objectVal(int doc) {
int ord=termsIndex.getOrd(doc);
if (ord == 0) {
return null;
} else {
BytesRef br = termsIndex.lookup(ord, new BytesRef());
return ft.toObject(null, br);
}
}
@Override @Override
public String toString(int doc) { public String toString(int doc) {
return description() + '=' + intVal(doc); return description() + '=' + intVal(doc);
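objectVal() here follows the FieldCache string-index convention that the Sortable*FieldSource changes below repeat: ord 0 is reserved for documents with no value, so the object view returns null instead of decoding a nonexistent term. The shared shape, as a sketch; decode() stands in for the type-specific conversion:

    public Object objectVal(int doc) {
      int ord = termsIndex.getOrd(doc);  // ord 0 == document has no value
      return ord == 0 ? null : decode(termsIndex.lookup(ord, new BytesRef()));
    }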

View File

@ -28,6 +28,7 @@ import org.apache.lucene.util.ReaderUtil;
import org.apache.solr.response.TextResponseWriter; import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser; import org.apache.solr.search.QParser;
import org.apache.solr.search.function.DocValues; import org.apache.solr.search.function.DocValues;
import org.apache.solr.search.function.IntDocValues;
import org.apache.solr.search.function.ValueSource; import org.apache.solr.search.function.ValueSource;
/** /**
@ -157,37 +158,12 @@ public class RandomSortField extends FieldType {
@Override @Override
public DocValues getValues(Map context, final AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, final AtomicReaderContext readerContext) throws IOException {
return new DocValues() { return new IntDocValues(this) {
private final int seed = getSeed(field, readerContext); private final int seed = getSeed(field, readerContext);
@Override
public float floatVal(int doc) {
return (float)hash(doc+seed);
}
@Override @Override
public int intVal(int doc) { public int intVal(int doc) {
return hash(doc+seed); return hash(doc+seed);
} }
@Override
public long longVal(int doc) {
return (long)hash(doc+seed);
}
@Override
public double doubleVal(int doc) {
return (double)hash(doc+seed);
}
@Override
public String strVal(int doc) {
return Integer.toString(hash(doc+seed));
}
@Override
public String toString(int doc) {
return description() + '=' + intVal(doc);
}
}; };
} }
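This is the payoff of the DocValues subclasses introduced later in this commit: a source that is naturally an int implements only intVal(), and IntDocValues derives the byte/short/float/long/double/string views plus the ValueFiller. The same idea reduced to a sketch, with vs as the enclosing ValueSource:

    DocValues values = new IntDocValues(vs) {
      @Override
      public int intVal(int doc) {
        return hash(doc + seed);  // seed and hash() as defined by RandomSortField
      }
    };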

View File

@ -148,6 +148,12 @@ class SortableDoubleFieldSource extends FieldCacheSource {
return Double.toString(doubleVal(doc)); return Double.toString(doubleVal(doc));
} }
@Override
public Object objectVal(int doc) {
int ord=termsIndex.getOrd(doc);
return ord==0 ? null : NumberUtils.SortableStr2double(termsIndex.lookup(ord, spare));
}
@Override @Override
public String toString(int doc) { public String toString(int doc) {
return description() + '=' + doubleVal(doc); return description() + '=' + doubleVal(doc);

View File

@ -153,6 +153,12 @@ class SortableFloatFieldSource extends FieldCacheSource {
return description() + '=' + floatVal(doc); return description() + '=' + floatVal(doc);
} }
@Override
public Object objectVal(int doc) {
int ord=termsIndex.getOrd(doc);
return ord==0 ? null : NumberUtils.SortableStr2float(termsIndex.lookup(ord, spare));
}
@Override @Override
public ValueFiller getValueFiller() { public ValueFiller getValueFiller() {
return new ValueFiller() { return new ValueFiller() {

View File

@ -155,6 +155,12 @@ class SortableIntFieldSource extends FieldCacheSource {
return description() + '=' + intVal(doc); return description() + '=' + intVal(doc);
} }
@Override
public Object objectVal(int doc) {
int ord=termsIndex.getOrd(doc);
return ord==0 ? null : NumberUtils.SortableStr2int(termsIndex.lookup(ord, spare));
}
@Override @Override
public ValueFiller getValueFiller() { public ValueFiller getValueFiller() {
return new ValueFiller() { return new ValueFiller() {

View File

@ -149,6 +149,12 @@ class SortableLongFieldSource extends FieldCacheSource {
return Long.toString(longVal(doc)); return Long.toString(longVal(doc));
} }
@Override
public Object objectVal(int doc) {
int ord=termsIndex.getOrd(doc);
return ord==0 ? null : NumberUtils.SortableStr2long(termsIndex.lookup(ord, spare));
}
@Override @Override
public String toString(int doc) { public String toString(int doc) {
return description() + '=' + longVal(doc); return description() + '=' + longVal(doc);

View File

@ -19,9 +19,11 @@ package org.apache.solr.schema;
import org.apache.lucene.index.IndexReader.AtomicReaderContext; import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.noggit.CharArr;
import org.apache.solr.search.function.DocValues; import org.apache.solr.search.function.DocValues;
import org.apache.solr.search.function.FieldCacheSource; import org.apache.solr.search.function.FieldCacheSource;
import org.apache.solr.search.function.StringIndexDocValues; import org.apache.solr.search.function.StringIndexDocValues;
import org.apache.solr.util.ByteUtils;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
@ -40,32 +42,12 @@ public class StrFieldSource extends FieldCacheSource {
@Override @Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
return new StringIndexDocValues(this, readerContext, field) { return new StringIndexDocValues(this, readerContext, field) {
@Override @Override
protected String toTerm(String readableValue) { protected String toTerm(String readableValue) {
return readableValue; return readableValue;
} }
@Override
public float floatVal(int doc) {
return (float)intVal(doc);
}
@Override
public int intVal(int doc) {
int ord=termsIndex.getOrd(doc);
return ord;
}
@Override
public long longVal(int doc) {
return (long)intVal(doc);
}
@Override
public double doubleVal(int doc) {
return (double)intVal(doc);
}
@Override @Override
public int ordVal(int doc) { public int ordVal(int doc) {
return termsIndex.getOrd(doc); return termsIndex.getOrd(doc);
@ -77,13 +59,8 @@ public class StrFieldSource extends FieldCacheSource {
} }
@Override @Override
public String strVal(int doc) { public Object objectVal(int doc) {
int ord=termsIndex.getOrd(doc); return strVal(doc);
if (ord == 0) {
return null;
} else {
return termsIndex.lookup(ord, new BytesRef()).utf8ToString();
}
} }
@Override @Override

View File

@ -538,6 +538,11 @@ class TrieDateFieldSource extends LongFieldSource {
return new MutableValueDate(); return new MutableValueDate();
} }
@Override
public Object longToObject(long val) {
return new Date(val);
}
@Override @Override
public long externalToLong(String extVal) { public long externalToLong(String extVal) {
return TrieField.dateField.parseMath(null, extVal).getTime(); return TrieField.dateField.parseMath(null, extVal).getTime();

View File

@ -238,6 +238,7 @@ class ExtendedDismaxQParser extends QParser {
try { try {
up.setRemoveStopFilter(!stopwords); up.setRemoveStopFilter(!stopwords);
up.exceptions = true;
parsedUserQuery = up.parse(mainUserQuery); parsedUserQuery = up.parse(mainUserQuery);
if (stopwords && isEmpty(parsedUserQuery)) { if (stopwords && isEmpty(parsedUserQuery)) {
@ -247,6 +248,7 @@ class ExtendedDismaxQParser extends QParser {
} }
} catch (Exception e) { } catch (Exception e) {
// ignore failure and reparse later after escaping reserved chars // ignore failure and reparse later after escaping reserved chars
up.exceptions = false;
} }
if (parsedUserQuery != null && doMinMatched) { if (parsedUserQuery != null && doMinMatched) {
@ -785,12 +787,19 @@ class ExtendedDismaxQParser extends QParser {
RANGE RANGE
} }
static final RuntimeException unknownField = new RuntimeException("UnknownField");
static {
unknownField.fillInStackTrace();
}
/** /**
* A subclass of SolrQueryParser that supports aliasing fields for * A subclass of SolrQueryParser that supports aliasing fields for
* constructing DisjunctionMaxQueries. * constructing DisjunctionMaxQueries.
*/ */
class ExtendedSolrQueryParser extends SolrQueryParser { class ExtendedSolrQueryParser extends SolrQueryParser {
/** A simple container for storing alias info /** A simple container for storing alias info
*/ */
protected class Alias { protected class Alias {
@ -803,6 +812,7 @@ class ExtendedDismaxQParser extends QParser {
boolean allowWildcard=true; boolean allowWildcard=true;
int minClauseSize = 0; // minimum number of clauses per phrase query... int minClauseSize = 0; // minimum number of clauses per phrase query...
// used when constructing boosting part of query via sloppy phrases // used when constructing boosting part of query via sloppy phrases
boolean exceptions; // allow exceptions to be thrown (for example on a missing field)
ExtendedAnalyzer analyzer; ExtendedAnalyzer analyzer;
@ -982,6 +992,15 @@ class ExtendedDismaxQParser extends QParser {
return q; return q;
} }
} else { } else {
// verify that a fielded query is actually on a field that exists... if not,
// then throw an exception to get us out of here, and we'll treat it like a
// literal when we try the escape+re-parse.
if (exceptions) {
FieldType ft = schema.getFieldTypeNoEx(field);
if (ft == null) throw unknownField;
}
return getQuery(); return getQuery();
} }
} }
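The static unknownField exception above is a control-flow trick: any bare token before a colon gets tried as a field name, so misses are common, and throwing one shared, pre-filled instance avoids allocating a throwable and capturing a stack trace on every failed attempt. The idiom isolated, with requireKnownField as a hypothetical helper:

    static final RuntimeException UNKNOWN_FIELD = new RuntimeException("UnknownField");
    static { UNKNOWN_FIELD.fillInStackTrace(); }  // fill once, at class init

    static void requireKnownField(IndexSchema schema, String field) {
      // getFieldTypeNoEx returns null (no exception) for unknown fields
      if (schema.getFieldTypeNoEx(field) == null) throw UNKNOWN_FIELD;
    }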

View File

@ -306,7 +306,7 @@ public class QueryParsing {
Boolean top = sp.getSortDirection(); Boolean top = sp.getSortDirection();
if (null == top) { if (null == top) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Can't determine Sort Order: " + sp); "Can't determine a Sort Order (asc or desc) in sort spec " + sp);
} }
if (SCORE.equals(field)) { if (SCORE.equals(field)) {
@ -638,7 +638,11 @@ public class QueryParsing {
} }
String v = val.substring(start,pos); String v = val.substring(start,pos);
return flt ? Double.parseDouble(v) : Long.parseLong(v); if (flt) {
return Double.parseDouble(v);
} else {
return Long.parseLong(v);
}
} }
double getDouble() throws ParseException { double getDouble() throws ParseException {

View File

@ -793,7 +793,7 @@ class LongConstValueSource extends ConstNumberSource {
@Override @Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
return new DocValues() { return new LongDocValues(this) {
@Override @Override
public float floatVal(int doc) { public float floatVal(int doc) {
return fv; return fv;
@ -814,11 +814,6 @@ class LongConstValueSource extends ConstNumberSource {
return dv; return dv;
} }
@Override
public String strVal(int doc) {
return Long.toString(constant);
}
@Override @Override
public String toString(int doc) { public String toString(int doc) {
return description(); return description();
@ -901,28 +896,12 @@ abstract class DoubleParser extends NamedParser {
@Override @Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final DocValues vals = source.getValues(context, readerContext); final DocValues vals = source.getValues(context, readerContext);
return new DocValues() { return new DoubleDocValues(this) {
@Override
public float floatVal(int doc) {
return (float)doubleVal(doc);
}
@Override
public int intVal(int doc) {
return (int)doubleVal(doc);
}
@Override
public long longVal(int doc) {
return (long)doubleVal(doc);
}
@Override @Override
public double doubleVal(int doc) { public double doubleVal(int doc) {
return func(doc, vals); return func(doc, vals);
} }
@Override @Override
public String strVal(int doc) {
return Double.toString(doubleVal(doc));
}
@Override
public String toString(int doc) { public String toString(int doc) {
return name() + '(' + vals.toString(doc) + ')'; return name() + '(' + vals.toString(doc) + ')';
} }
@ -966,28 +945,12 @@ abstract class Double2Parser extends NamedParser {
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final DocValues aVals = a.getValues(context, readerContext); final DocValues aVals = a.getValues(context, readerContext);
final DocValues bVals = b.getValues(context, readerContext); final DocValues bVals = b.getValues(context, readerContext);
return new DocValues() { return new DoubleDocValues(this) {
@Override @Override
public float floatVal(int doc) {
return (float)doubleVal(doc);
}
@Override
public int intVal(int doc) {
return (int)doubleVal(doc);
}
@Override
public long longVal(int doc) {
return (long)doubleVal(doc);
}
@Override
public double doubleVal(int doc) { public double doubleVal(int doc) {
return func(doc, aVals, bVals); return func(doc, aVals, bVals);
} }
@Override @Override
public String strVal(int doc) {
return Double.toString(doubleVal(doc));
}
@Override
public String toString(int doc) { public String toString(int doc) {
return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')'; return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')';
} }

View File

@ -88,6 +88,11 @@ public class ByteFieldSource extends NumericFieldCacheSource<ByteValues> {
return description() + '=' + byteVal(doc); return description() + '=' + byteVal(doc);
} }
@Override
public Object objectVal(int doc) {
return arr[doc]; // TODO: valid?
}
}; };
} }
} }

View File

@ -41,7 +41,7 @@ public class ConstValueSource extends ConstNumberSource {
@Override @Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
return new DocValues() { return new FloatDocValues(this) {
@Override @Override
public float floatVal(int doc) { public float floatVal(int doc) {
return constant; return constant;
@ -59,13 +59,13 @@ public class ConstValueSource extends ConstNumberSource {
return dv; return dv;
} }
@Override @Override
public String strVal(int doc) {
return Float.toString(constant);
}
@Override
public String toString(int doc) { public String toString(int doc) {
return description(); return description();
} }
@Override
public Object objectVal(int doc) {
return constant;
}
}; };
} }

View File

@ -21,14 +21,13 @@ import org.apache.lucene.index.IndexReader.AtomicReaderContext;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.solr.search.MutableValueInt; import org.apache.solr.search.*;
import org.apache.solr.search.MutableValue;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
class ConstIntDocValues extends DocValues { class ConstIntDocValues extends IntDocValues {
final int ival; final int ival;
final float fval; final float fval;
final double dval; final double dval;
@ -37,6 +36,7 @@ class ConstIntDocValues extends DocValues {
final ValueSource parent; final ValueSource parent;
ConstIntDocValues(int val, ValueSource parent) { ConstIntDocValues(int val, ValueSource parent) {
super(parent);
ival = val; ival = val;
fval = val; fval = val;
dval = val; dval = val;
@ -71,7 +71,7 @@ class ConstIntDocValues extends DocValues {
} }
} }
class ConstDoubleDocValues extends DocValues { class ConstDoubleDocValues extends DoubleDocValues {
final int ival; final int ival;
final float fval; final float fval;
final double dval; final double dval;
@ -80,6 +80,7 @@ class ConstDoubleDocValues extends DocValues {
final ValueSource parent; final ValueSource parent;
ConstDoubleDocValues(double val, ValueSource parent) { ConstDoubleDocValues(double val, ValueSource parent) {
super(parent);
ival = (int)val; ival = (int)val;
fval = (float)val; fval = (float)val;
dval = val; dval = val;
@ -114,115 +115,6 @@ class ConstDoubleDocValues extends DocValues {
} }
} }
abstract class FloatDocValues extends DocValues {
protected final ValueSource vs;
public FloatDocValues(ValueSource vs) {
this.vs = vs;
}
@Override
public byte byteVal(int doc) {
return (byte)floatVal(doc);
}
@Override
public short shortVal(int doc) {
return (short)floatVal(doc);
}
@Override
public abstract float floatVal(int doc);
@Override
public int intVal(int doc) {
return (int)floatVal(doc);
}
@Override
public long longVal(int doc) {
return (long)floatVal(doc);
}
@Override
public double doubleVal(int doc) {
return (double)floatVal(doc);
}
@Override
public String strVal(int doc) {
return Float.toString(floatVal(doc));
}
@Override
public String toString(int doc) {
return vs.description() + '=' + strVal(doc);
}
}
abstract class IntDocValues extends DocValues {
protected final ValueSource vs;
public IntDocValues(ValueSource vs) {
this.vs = vs;
}
@Override
public byte byteVal(int doc) {
return (byte)intVal(doc);
}
@Override
public short shortVal(int doc) {
return (short)intVal(doc);
}
@Override
public float floatVal(int doc) {
return (float)intVal(doc);
}
@Override
public abstract int intVal(int doc);
@Override
public long longVal(int doc) {
return (long)intVal(doc);
}
@Override
public double doubleVal(int doc) {
return (double)intVal(doc);
}
@Override
public String strVal(int doc) {
return Integer.toString(intVal(doc));
}
@Override
public String toString(int doc) {
return vs.description() + '=' + strVal(doc);
}
@Override
public ValueFiller getValueFiller() {
return new ValueFiller() {
private final MutableValueInt mval = new MutableValueInt();
@Override
public MutableValue getValue() {
return mval;
}
@Override
public void fillValue(int doc) {
mval.value = intVal(doc);
}
};
}
}
/** /**
* <code>DocFreqValueSource</code> returns the number of documents containing the term. * <code>DocFreqValueSource</code> returns the number of documents containing the term.

View File

@ -19,6 +19,7 @@ package org.apache.solr.search.function;
import org.apache.lucene.search.*; import org.apache.lucene.search.*;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.search.MutableValue; import org.apache.solr.search.MutableValue;
import org.apache.solr.search.MutableValueFloat; import org.apache.solr.search.MutableValueFloat;
@ -47,6 +48,28 @@ public abstract class DocValues {
// TODO: should we make a termVal, returns BytesRef? // TODO: should we make a termVal, returns BytesRef?
public String strVal(int doc) { throw new UnsupportedOperationException(); } public String strVal(int doc) { throw new UnsupportedOperationException(); }
/** returns the bytes representation of the string val - TODO: should this return the indexed raw bytes instead? */
public boolean bytesVal(int doc, BytesRef target) {
String s = strVal(doc);
if (s==null) {
target.length = 0;
return false;
}
target.copy(s);
return true;
}
/** Native Java Object representation of the value */
public Object objectVal(int doc) {
// most DocValues are functions, so by default return a Float()
return floatVal(doc);
}
/** Returns true if there is a value for this document */
public boolean exists(int doc) {
return true;
}
/** /**
* @param doc The doc to retrieve to sort ordinal for * @param doc The doc to retrieve to sort ordinal for
* @return the sort ordinal for the specified doc * @return the sort ordinal for the specified doc
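These additions give DocValues a typed, missing-aware surface: bytesVal() funnels through strVal(), objectVal() defaults to the float view because most sources are computed functions, and exists() defaults to true so only field-backed sources need to override it. A hedged sketch of the consuming side, matching how ValueSourceAugmenter earlier in this commit reads values:

    DocValues vals = valueSource.getValues(context, readerContext);
    Object v = vals.objectVal(localId);  // Integer, Double, String, Date, ... or null
    if (v != null) {
      doc.setField(name, v);             // only set fields that actually have a value
    }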

View File

@ -40,7 +40,7 @@ public class DoubleConstValueSource extends ConstNumberSource {
@Override @Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
return new DocValues() { return new DoubleDocValues(this) {
@Override @Override
public float floatVal(int doc) { public float floatVal(int doc) {
return fv; return fv;
@ -66,6 +66,11 @@ public class DoubleConstValueSource extends ConstNumberSource {
return Double.toString(constant); return Double.toString(constant);
} }
@Override
public Object objectVal(int doc) {
return constant;
}
@Override @Override
public String toString(int doc) { public String toString(int doc) {
return description(); return description();

View File

@ -0,0 +1,74 @@
package org.apache.solr.search.function;
import org.apache.solr.search.MutableValue;
import org.apache.solr.search.MutableValueDouble;
public abstract class DoubleDocValues extends DocValues {
protected final ValueSource vs;
public DoubleDocValues(ValueSource vs) {
this.vs = vs;
}
@Override
public byte byteVal(int doc) {
return (byte)doubleVal(doc);
}
@Override
public short shortVal(int doc) {
return (short)doubleVal(doc);
}
@Override
public float floatVal(int doc) {
return (float)doubleVal(doc);
}
@Override
public int intVal(int doc) {
return (int)doubleVal(doc);
}
@Override
public long longVal(int doc) {
return (long)doubleVal(doc);
}
@Override
public abstract double doubleVal(int doc);
@Override
public String strVal(int doc) {
return Double.toString(doubleVal(doc));
}
@Override
public Object objectVal(int doc) {
return exists(doc) ? doubleVal(doc) : null;
}
@Override
public String toString(int doc) {
return vs.description() + '=' + strVal(doc);
}
@Override
public ValueFiller getValueFiller() {
return new ValueFiller() {
private final MutableValueDouble mval = new MutableValueDouble();
@Override
public MutableValue getValue() {
return mval;
}
@Override
public void fillValue(int doc) {
mval.value = doubleVal(doc);
mval.exists = exists(doc);
}
};
}
}

View File

@ -51,7 +51,7 @@ public class DoubleFieldSource extends NumericFieldCacheSource<DoubleValues> {
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final DoubleValues vals = cache.getDoubles(readerContext.reader, field, creator); final DoubleValues vals = cache.getDoubles(readerContext.reader, field, creator);
final double[] arr = vals.values; final double[] arr = vals.values;
final Bits valid = vals.valid; final Bits valid = vals.valid;
return new DocValues() { return new DocValues() {
@Override @Override
@ -79,6 +79,11 @@ public class DoubleFieldSource extends NumericFieldCacheSource<DoubleValues> {
return Double.toString(arr[doc]); return Double.toString(arr[doc]);
} }
@Override
public Object objectVal(int doc) {
return valid.get(doc) ? arr[doc] : null;
}
@Override @Override
public String toString(int doc) { public String toString(int doc) {
return description() + '=' + doubleVal(doc); return description() + '=' + doubleVal(doc);

View File

@ -48,28 +48,12 @@ public abstract class DualFloatFunction extends ValueSource {
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final DocValues aVals = a.getValues(context, readerContext); final DocValues aVals = a.getValues(context, readerContext);
final DocValues bVals = b.getValues(context, readerContext); final DocValues bVals = b.getValues(context, readerContext);
return new DocValues() { return new FloatDocValues(this) {
@Override @Override
public float floatVal(int doc) { public float floatVal(int doc) {
return func(doc, aVals, bVals); return func(doc, aVals, bVals);
} }
@Override @Override
public int intVal(int doc) {
return (int)floatVal(doc);
}
@Override
public long longVal(int doc) {
return (long)floatVal(doc);
}
@Override
public double doubleVal(int doc) {
return floatVal(doc);
}
@Override
public String strVal(int doc) {
return Float.toString(floatVal(doc));
}
@Override
public String toString(int doc) { public String toString(int doc) {
return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')'; return name() + '(' + aVals.toString(doc) + ',' + bVals.toString(doc) + ')';
} }

View File

@ -65,35 +65,15 @@ public class FileFloatSource extends ValueSource {
final int off = offset; final int off = offset;
final float[] arr = getCachedFloats(topLevelContext.reader); final float[] arr = getCachedFloats(topLevelContext.reader);
return new DocValues() { return new FloatDocValues(this) {
@Override @Override
public float floatVal(int doc) { public float floatVal(int doc) {
return arr[doc + off]; return arr[doc + off];
} }
@Override @Override
public int intVal(int doc) { public Object objectVal(int doc) {
return (int)arr[doc + off]; return floatVal(doc); // TODO: keep track of missing values
}
@Override
public long longVal(int doc) {
return (long)arr[doc + off];
}
@Override
public double doubleVal(int doc) {
return (double)arr[doc + off];
}
@Override
public String strVal(int doc) {
return Float.toString(arr[doc + off]);
}
@Override
public String toString(int doc) {
return description() + '=' + floatVal(doc);
} }
}; };
} }

View File

@ -0,0 +1,73 @@
package org.apache.solr.search.function;
import org.apache.solr.search.MutableValue;
import org.apache.solr.search.MutableValueFloat;
public abstract class FloatDocValues extends DocValues {
protected final ValueSource vs;
public FloatDocValues(ValueSource vs) {
this.vs = vs;
}
@Override
public byte byteVal(int doc) {
return (byte)floatVal(doc);
}
@Override
public short shortVal(int doc) {
return (short)floatVal(doc);
}
@Override
public abstract float floatVal(int doc);
@Override
public int intVal(int doc) {
return (int)floatVal(doc);
}
@Override
public long longVal(int doc) {
return (long)floatVal(doc);
}
@Override
public double doubleVal(int doc) {
return (double)floatVal(doc);
}
@Override
public String strVal(int doc) {
return Float.toString(floatVal(doc));
}
@Override
public Object objectVal(int doc) {
return exists(doc) ? floatVal(doc) : null;
}
@Override
public String toString(int doc) {
return vs.description() + '=' + strVal(doc);
}
@Override
public ValueFiller getValueFiller() {
return new ValueFiller() {
private final MutableValueFloat mval = new MutableValueFloat();
@Override
public MutableValue getValue() {
return mval;
}
@Override
public void fillValue(int doc) {
mval.value = floatVal(doc);
mval.exists = exists(doc);
}
};
}
}

View File

@ -50,37 +50,22 @@ public class FloatFieldSource extends NumericFieldCacheSource<FloatValues> {
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final FloatValues vals = cache.getFloats(readerContext.reader, field, creator); final FloatValues vals = cache.getFloats(readerContext.reader, field, creator);
final float[] arr = vals.values; final float[] arr = vals.values;
final Bits valid = vals.valid; final Bits valid = vals.valid;
return new DocValues() { return new FloatDocValues(this) {
@Override @Override
public float floatVal(int doc) { public float floatVal(int doc) {
return arr[doc]; return arr[doc];
} }
@Override @Override
public int intVal(int doc) { public Object objectVal(int doc) {
return (int)arr[doc]; return valid.get(doc) ? arr[doc] : null;
} }
@Override @Override
public long longVal(int doc) { public boolean exists(int doc) {
return (long)arr[doc]; return valid.get(doc);
}
@Override
public double doubleVal(int doc) {
return (double)arr[doc];
}
@Override
public String strVal(int doc) {
return Float.toString(arr[doc]);
}
@Override
public String toString(int doc) {
return description() + '=' + floatVal(doc);
} }
@Override @Override

View File

@ -0,0 +1,73 @@
package org.apache.solr.search.function;
import org.apache.solr.search.MutableValue;
import org.apache.solr.search.MutableValueInt;
public abstract class IntDocValues extends DocValues {
protected final ValueSource vs;
public IntDocValues(ValueSource vs) {
this.vs = vs;
}
@Override
public byte byteVal(int doc) {
return (byte)intVal(doc);
}
@Override
public short shortVal(int doc) {
return (short)intVal(doc);
}
@Override
public float floatVal(int doc) {
return (float)intVal(doc);
}
@Override
public abstract int intVal(int doc);
@Override
public long longVal(int doc) {
return (long)intVal(doc);
}
@Override
public double doubleVal(int doc) {
return (double)intVal(doc);
}
@Override
public String strVal(int doc) {
return Integer.toString(intVal(doc));
}
@Override
public Object objectVal(int doc) {
return exists(doc) ? intVal(doc) : null;
}
@Override
public String toString(int doc) {
return vs.description() + '=' + strVal(doc);
}
@Override
public ValueFiller getValueFiller() {
return new ValueFiller() {
private final MutableValueInt mval = new MutableValueInt();
@Override
public MutableValue getValue() {
return mval;
}
@Override
public void fillValue(int doc) {
mval.value = intVal(doc);
mval.exists = exists(doc);
}
};
}
}

View File

@ -51,9 +51,9 @@ public class IntFieldSource extends NumericFieldCacheSource<IntValues> {
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final IntValues vals = cache.getInts(readerContext.reader, field, creator); final IntValues vals = cache.getInts(readerContext.reader, field, creator);
final int[] arr = vals.values; final int[] arr = vals.values;
final Bits valid = vals.valid; final Bits valid = vals.valid;
return new DocValues() { return new IntDocValues(this) {
final MutableValueInt val = new MutableValueInt(); final MutableValueInt val = new MutableValueInt();
@Override @Override
@ -81,6 +81,16 @@ public class IntFieldSource extends NumericFieldCacheSource<IntValues> {
return Float.toString(arr[doc]); return Float.toString(arr[doc]);
} }
@Override
public Object objectVal(int doc) {
return valid.get(doc) ? arr[doc] : null;
}
@Override
public boolean exists(int doc) {
return valid.get(doc);
}
@Override @Override
public String toString(int doc) { public String toString(int doc) {
return description() + '=' + intVal(doc); return description() + '=' + intVal(doc);

View File

@@ -54,7 +54,7 @@ public class JoinDocFreqValueSource extends FieldCacheSource {
     final DocTerms terms = cache.getTerms(readerContext.reader, field, true );
     final IndexReader top = ReaderUtil.getTopLevelContext(readerContext).reader;
 
-    return new DocValues() {
+    return new IntDocValues(this) {
       BytesRef ref = new BytesRef();
 
       @Override
@@ -70,31 +70,6 @@ public class JoinDocFreqValueSource extends FieldCacheSource {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in function "+description()+" : doc="+doc, e);
         }
       }
-
-      @Override
-      public float floatVal(int doc) {
-        return (float)intVal(doc);
-      }
-
-      @Override
-      public long longVal(int doc) {
-        return (long)intVal(doc);
-      }
-
-      @Override
-      public double doubleVal(int doc) {
-        return (double)intVal(doc);
-      }
-
-      @Override
-      public String strVal(int doc) {
-        return intVal(doc) + "";
-      }
-
-      @Override
-      public String toString(int doc) {
-        return description() + '=' + intVal(doc);
-      }
     };
   }

LinearFloatFunction.java

@@ -50,28 +50,12 @@ public class LinearFloatFunction extends ValueSource {
   @Override
   public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
     final DocValues vals = source.getValues(context, readerContext);
-    return new DocValues() {
+    return new FloatDocValues(this) {
       @Override
       public float floatVal(int doc) {
         return vals.floatVal(doc) * slope + intercept;
       }
       @Override
-      public int intVal(int doc) {
-        return (int)floatVal(doc);
-      }
-      @Override
-      public long longVal(int doc) {
-        return (long)floatVal(doc);
-      }
-      @Override
-      public double doubleVal(int doc) {
-        return (double)floatVal(doc);
-      }
-      @Override
-      public String strVal(int doc) {
-        return Float.toString(floatVal(doc));
-      }
-      @Override
       public String toString(int doc) {
         return slope + "*float(" + vals.toString(doc) + ")+" + intercept;
       }

LiteralValueSource.java

@@ -17,19 +17,23 @@ package org.apache.solr.search.function;
  */
 
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.util.BytesRef;
 
 import java.util.Map;
 import java.io.IOException;
 
 /**
- * Pass a the field value through as a String, no matter the type
+ * Pass a the field value through as a String, no matter the type // Q: doesn't this mean it's a "string"?
  *
  **/
 public class LiteralValueSource extends ValueSource {
   protected final String string;
+  protected final BytesRef bytesRef;
 
   public LiteralValueSource(String string) {
     this.string = string;
+    this.bytesRef = new BytesRef(string);
   }
 
   /** returns the literal value */
@@ -40,12 +44,18 @@ public class LiteralValueSource extends ValueSource {
   @Override
   public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
-    return new DocValues() {
+    return new StrDocValues(this) {
       @Override
       public String strVal(int doc) {
         return string;
       }
+
+      @Override
+      public boolean bytesVal(int doc, BytesRef target) {
+        target.copy(bytesRef);
+        return true;
+      }
 
       @Override
       public String toString(int doc) {
         return string;
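The bytesRef field is encoded once in the constructor, so bytesVal(doc, target) copies precomputed bytes instead of converting the String on every call. A sketch of the intended reuse pattern on the caller side (assumed, not part of this commit):

    // The caller allocates one scratch BytesRef and reuses it across documents;
    // bytesVal() fills it by copying the ValueSource's precomputed bytes.
    static void collectBytes(DocValues dv, int maxDoc) {
      BytesRef scratch = new BytesRef();
      for (int doc = 0; doc < maxDoc; doc++) {
        if (dv.bytesVal(doc, scratch)) {
          // consume scratch.bytes[scratch.offset .. scratch.offset + scratch.length)
        }
      }
    }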

LongDocValues.java (new file)

@@ -0,0 +1,73 @@
+package org.apache.solr.search.function;
+
+import org.apache.solr.search.MutableValue;
+import org.apache.solr.search.MutableValueLong;
+
+public abstract class LongDocValues extends DocValues {
+  protected final ValueSource vs;
+
+  public LongDocValues(ValueSource vs) {
+    this.vs = vs;
+  }
+
+  @Override
+  public byte byteVal(int doc) {
+    return (byte)longVal(doc);
+  }
+
+  @Override
+  public short shortVal(int doc) {
+    return (short)longVal(doc);
+  }
+
+  @Override
+  public float floatVal(int doc) {
+    return (float)longVal(doc);
+  }
+
+  @Override
+  public int intVal(int doc) {
+    return (int)longVal(doc);
+  }
+
+  @Override
+  public abstract long longVal(int doc);
+
+  @Override
+  public double doubleVal(int doc) {
+    return (double)longVal(doc);
+  }
+
+  @Override
+  public String strVal(int doc) {
+    return Long.toString(longVal(doc));
+  }
+
+  @Override
+  public Object objectVal(int doc) {
+    return exists(doc) ? longVal(doc) : null;
+  }
+
+  @Override
+  public String toString(int doc) {
+    return vs.description() + '=' + strVal(doc);
+  }
+
+  @Override
+  public ValueFiller getValueFiller() {
+    return new ValueFiller() {
+      private final MutableValueLong mval = new MutableValueLong();
+
+      @Override
+      public MutableValue getValue() {
+        return mval;
+      }
+
+      @Override
+      public void fillValue(int doc) {
+        mval.value = longVal(doc);
+        mval.exists = exists(doc);
+      }
+    };
+  }
+}

LongFieldSource.java

@@ -52,41 +52,30 @@ public class LongFieldSource extends NumericFieldCacheSource<LongValues> {
     return Long.parseLong(extVal);
   }
 
+  public Object longToObject(long val) {
+    return val;
+  }
+
   @Override
   public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
     final LongValues vals = cache.getLongs(readerContext.reader, field, creator);
     final long[] arr = vals.values;
     final Bits valid = vals.valid;
 
-    return new DocValues() {
-      @Override
-      public float floatVal(int doc) {
-        return (float) arr[doc];
-      }
-
-      @Override
-      public int intVal(int doc) {
-        return (int) arr[doc];
-      }
-
+    return new LongDocValues(this) {
       @Override
       public long longVal(int doc) {
         return arr[doc];
       }
 
       @Override
-      public double doubleVal(int doc) {
-        return arr[doc];
-      }
-
-      @Override
-      public String strVal(int doc) {
-        return Long.toString(arr[doc]);
-      }
-
-      @Override
-      public String toString(int doc) {
-        return description() + '=' + longVal(doc);
+      public boolean exists(int doc) {
+        return valid.get(doc);
+      }
+
+      @Override
+      public Object objectVal(int doc) {
+        return valid.get(doc) ? longToObject(arr[doc]) : null;
       }
 
       @Override
@@ -142,8 +131,6 @@ public class LongFieldSource extends NumericFieldCacheSource<LongValues> {
         };
       }
     };
   }
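The new longToObject(long) hook lets a subclass change the external (boxed) type without reimplementing objectVal(): the anonymous LongDocValues above funnels every existing value through it. A hypothetical date-valued source, assuming the creator-based constructor this class already uses (the exact signature may differ):

    // Hypothetical subclass: values are stored as epoch milliseconds, but
    // objectVal() should hand back java.util.Date rather than Long.
    public class EpochMillisDateSource extends LongFieldSource {
      public EpochMillisDateSource(LongValuesCreator creator) {
        super(creator);  // assumed: constructor shape taken from LongFieldSource
      }

      @Override
      public Object longToObject(long val) {
        return new java.util.Date(val);  // picked up by objectVal() above
      }
    }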

MultiFloatFunction.java

@@ -62,28 +62,12 @@ public abstract class MultiFloatFunction extends ValueSource {
       valsArr[i] = sources[i].getValues(context, readerContext);
     }
 
-    return new DocValues() {
+    return new FloatDocValues(this) {
       @Override
       public float floatVal(int doc) {
         return func(doc, valsArr);
       }
       @Override
-      public int intVal(int doc) {
-        return (int)floatVal(doc);
-      }
-      @Override
-      public long longVal(int doc) {
-        return (long)floatVal(doc);
-      }
-      @Override
-      public double doubleVal(int doc) {
-        return (double)floatVal(doc);
-      }
-      @Override
-      public String strVal(int doc) {
-        return Float.toString(floatVal(doc));
-      }
-      @Override
       public String toString(int doc) {
         StringBuilder sb = new StringBuilder();
         sb.append(name()).append('(');

OrdFieldSource.java

@@ -63,53 +63,29 @@ public class OrdFieldSource extends ValueSource {
     final int off = readerContext.docBase;
     final IndexReader topReader = ReaderUtil.getTopLevelContext(readerContext).reader;
     final FieldCache.DocTermsIndex sindex = FieldCache.DEFAULT.getTermsIndex(topReader, field);
-    return new DocValues() {
+    return new IntDocValues(this) {
       protected String toTerm(String readableValue) {
         return readableValue;
       }
-      @Override
-      public float floatVal(int doc) {
-        return (float)sindex.getOrd(doc+off);
-      }
       @Override
       public int intVal(int doc) {
         return sindex.getOrd(doc+off);
       }
-      @Override
-      public long longVal(int doc) {
-        return (long)sindex.getOrd(doc+off);
-      }
-      @Override
-      public double doubleVal(int doc) {
-        return (double)sindex.getOrd(doc+off);
-      }
       @Override
       public int ordVal(int doc) {
         return sindex.getOrd(doc+off);
       }
       @Override
       public int numOrd() {
         return sindex.numOrd();
       }
       @Override
-      public String strVal(int doc) {
-        // the string value of the ordinal, not the string itself
-        return Integer.toString(sindex.getOrd(doc+off));
+      public boolean exists(int doc) {
+        return sindex.getOrd(doc+off) != 0;
       }
       @Override
-      public String toString(int doc) {
-        return description() + '=' + intVal(doc);
-      }
-      @Override
       public ValueFiller getValueFiller() {
         return new ValueFiller() {
           private final MutableValueInt mval = new MutableValueInt();
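The exists(doc) override above leans on a DocTermsIndex convention: ordinal 0 is reserved for documents with no term in the field. The removed strVal()/toString() overrides were safe to drop because IntDocValues derives identical output from intVal(doc), still "the string value of the ordinal, not the string itself". A compact restatement of the missing-value check (sketch, not committed code):

    // Sketch: ord 0 is the DocTermsIndex sentinel for "no term in this field",
    // so a document has a value exactly when its ordinal is non-zero.
    static boolean hasValue(FieldCache.DocTermsIndex sindex, int doc, int off) {
      return sindex.getOrd(doc + off) != 0;
    }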

QueryValueSource.java

@@ -23,6 +23,8 @@ import org.apache.lucene.search.*;
 import org.apache.lucene.search.Weight.ScorerContext;
 import org.apache.lucene.util.ReaderUtil;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.search.MutableValue;
+import org.apache.solr.search.MutableValueFloat;
 
 import java.io.IOException;
 import java.util.Map;
@@ -49,7 +51,7 @@ public class QueryValueSource extends ValueSource {
   @Override
   public DocValues getValues(Map fcontext, AtomicReaderContext readerContext) throws IOException {
-    return new QueryDocValues(readerContext, q, defVal, fcontext);
+    return new QueryDocValues(this, readerContext, fcontext);
   }
 
   @Override
@@ -72,31 +74,31 @@ public class QueryValueSource extends ValueSource {
 }
 
-class QueryDocValues extends DocValues {
-  final Query q;
-//  final IndexReader reader;
+class QueryDocValues extends FloatDocValues {
   final AtomicReaderContext readerContext;
   final Weight weight;
   final float defVal;
   final Map fcontext;
+  final Query q;
 
   Scorer scorer;
   int scorerDoc;  // the document the scorer is on
+  boolean noMatches=false;
 
   // the last document requested... start off with high value
   // to trigger a scorer reset on first access.
   int lastDocRequested=Integer.MAX_VALUE;
 
-  public QueryDocValues(AtomicReaderContext readerContext, Query q, float defVal, Map fcontext) throws IOException {
-    IndexReader reader = readerContext.reader;
+  public QueryDocValues(QueryValueSource vs, AtomicReaderContext readerContext, Map fcontext) throws IOException {
+    super(vs);
     this.readerContext = readerContext;
-    this.q = q;
-    this.defVal = defVal;
+    this.defVal = vs.defVal;
+    this.q = vs.q;
    this.fcontext = fcontext;
 
     Weight w = fcontext==null ? null : (Weight)fcontext.get(q);
-    // TODO: sort by function doesn't weight (SOLR-1297 is open because of this bug)... so weightSearcher will currently be null
     if (w == null) {
       IndexSearcher weightSearcher;
       if(fcontext == null) {
@@ -116,8 +118,12 @@ class QueryDocValues extends DocValues {
   public float floatVal(int doc) {
     try {
       if (doc < lastDocRequested) {
+        if (noMatches) return defVal;
         scorer = weight.scorer(readerContext, ScorerContext.def());
-        if (scorer==null) return defVal;
+        if (scorer==null) {
+          noMatches = true;
+          return defVal;
+        }
         scorerDoc = -1;
       }
       lastDocRequested = doc;
@@ -137,24 +143,104 @@ class QueryDocValues extends DocValues {
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in QueryDocVals("+q+") doc="+doc, e);
     }
   }
 
   @Override
-  public int intVal(int doc) {
-    return (int)floatVal(doc);
+  public boolean exists(int doc) {
+    try {
+      if (doc < lastDocRequested) {
+        if (noMatches) return false;
+        scorer = weight.scorer(readerContext, ScorerContext.def());
+        scorerDoc = -1;
+        if (scorer==null) {
+          noMatches = true;
+          return false;
+        }
+      }
+      lastDocRequested = doc;
+
+      if (scorerDoc < doc) {
+        scorerDoc = scorer.advance(doc);
+      }
+
+      if (scorerDoc > doc) {
+        // query doesn't match this document... either because we hit the
+        // end, or because the next doc is after this doc.
+        return false;
+      }
+
+      // a match!
+      return true;
+    } catch (IOException e) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in QueryDocVals("+q+") doc="+doc, e);
+    }
   }
 
   @Override
-  public long longVal(int doc) {
-    return (long)floatVal(doc);
-  }
-  @Override
-  public double doubleVal(int doc) {
-    return (double)floatVal(doc);
-  }
-  @Override
-  public String strVal(int doc) {
-    return Float.toString(floatVal(doc));
+  public Object objectVal(int doc) {
+    try {
+      return exists(doc) ? scorer.score() : null;
+    } catch (IOException e) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in QueryDocVals("+q+") doc="+doc, e);
+    }
+  }
+
+  @Override
+  public ValueFiller getValueFiller() {
+    //
+    // TODO: if we want to support more than one value-filler or a value-filler in conjunction with
+    // the DocValues, then members like "scorer" should be per ValueFiller instance.
+    // Or we can say that the user should just instantiate multiple DocValues.
+    //
+    return new ValueFiller() {
+      private final MutableValueFloat mval = new MutableValueFloat();
+
+      @Override
+      public MutableValue getValue() {
+        return mval;
+      }
+
+      @Override
+      public void fillValue(int doc) {
+        try {
+          if (noMatches) {
+            mval.value = defVal;
+            mval.exists = false;
+            return;
+          }
+          scorer = weight.scorer(readerContext, ScorerContext.def());
+          scorerDoc = -1;
+          if (scorer==null) {
+            noMatches = true;
+            mval.value = defVal;
+            mval.exists = false;
+            return;
+          }
+          lastDocRequested = doc;
+
+          if (scorerDoc < doc) {
+            scorerDoc = scorer.advance(doc);
+          }
+
+          if (scorerDoc > doc) {
+            // query doesn't match this document... either because we hit the
+            // end, or because the next doc is after this doc.
+            mval.value = defVal;
+            mval.exists = false;
+            return;
+          }
+
+          // a match!
+          mval.value = scorer.score();
+          mval.exists = true;
+          return;
+        } catch (IOException e) {
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "caught exception in QueryDocVals("+q+") doc="+doc, e);
+        }
+      }
+    };
   }
 
   @Override
   public String toString(int doc) {
     return "query(" + q + ",def=" + defVal + ")=" + floatVal(doc);

RangeMapFloatFunction.java

@@ -54,29 +54,13 @@ public class RangeMapFloatFunction extends ValueSource {
   @Override
   public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
     final DocValues vals = source.getValues(context, readerContext);
-    return new DocValues() {
+    return new FloatDocValues(this) {
       @Override
       public float floatVal(int doc) {
         float val = vals.floatVal(doc);
         return (val>=min && val<=max) ? target : (defaultVal == null ? val : defaultVal);
       }
       @Override
-      public int intVal(int doc) {
-        return (int)floatVal(doc);
-      }
-      @Override
-      public long longVal(int doc) {
-        return (long)floatVal(doc);
-      }
-      @Override
-      public double doubleVal(int doc) {
-        return (double)floatVal(doc);
-      }
-      @Override
-      public String strVal(int doc) {
-        return Float.toString(floatVal(doc));
-      }
-      @Override
       public String toString(int doc) {
         return "map(" + vals.toString(doc) + ",min=" + min + ",max=" + max + ",target=" + target + ")";
       }

ReciprocalFloatFunction.java

@@ -60,28 +60,12 @@ public class ReciprocalFloatFunction extends ValueSource {
   @Override
   public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
     final DocValues vals = source.getValues(context, readerContext);
-    return new DocValues() {
+    return new FloatDocValues(this) {
       @Override
       public float floatVal(int doc) {
         return a/(m*vals.floatVal(doc) + b);
       }
       @Override
-      public int intVal(int doc) {
-        return (int)floatVal(doc);
-      }
-      @Override
-      public long longVal(int doc) {
-        return (long)floatVal(doc);
-      }
-      @Override
-      public double doubleVal(int doc) {
-        return (double)floatVal(doc);
-      }
-      @Override
-      public String strVal(int doc) {
-        return Float.toString(floatVal(doc));
-      }
-      @Override
       public String toString(int doc) {
         return Float.toString(a) + "/("
                 + m + "*float(" + vals.toString(doc) + ')'

ReverseOrdFieldSource.java

@@ -66,47 +66,11 @@ public class ReverseOrdFieldSource extends ValueSource {
     final FieldCache.DocTermsIndex sindex = FieldCache.DEFAULT.getTermsIndex(topReader, field);
     final int end = sindex.numOrd();
 
-    return new DocValues() {
+    return new IntDocValues(this) {
       @Override
-      public float floatVal(int doc) {
-        return (float)(end - sindex.getOrd(doc+off));
-      }
-      @Override
       public int intVal(int doc) {
         return (end - sindex.getOrd(doc+off));
       }
-      @Override
-      public long longVal(int doc) {
-        return (long)(end - sindex.getOrd(doc+off));
-      }
-      @Override
-      public int ordVal(int doc) {
-        return (end - sindex.getOrd(doc+off));
-      }
-      @Override
-      public int numOrd() {
-        return end;
-      }
-      @Override
-      public double doubleVal(int doc) {
-        return (double)(end - sindex.getOrd(doc+off));
-      }
-      @Override
-      public String strVal(int doc) {
-        // the string value of the ordinal, not the string itself
-        return Integer.toString((end - sindex.getOrd(doc+off)));
-      }
-      @Override
-      public String toString(int doc) {
-        return description() + '=' + strVal(doc);
-      }
     };
   }

ScaleFloatFunction.java

@@ -108,28 +108,12 @@ public class ScaleFloatFunction extends ValueSource {
     final DocValues vals = source.getValues(context, readerContext);
 
-    return new DocValues() {
+    return new FloatDocValues(this) {
       @Override
       public float floatVal(int doc) {
         return (vals.floatVal(doc) - minSource) * scale + min;
       }
       @Override
-      public int intVal(int doc) {
-        return (int)floatVal(doc);
-      }
-      @Override
-      public long longVal(int doc) {
-        return (long)floatVal(doc);
-      }
-      @Override
-      public double doubleVal(int doc) {
-        return (double)floatVal(doc);
-      }
-      @Override
-      public String strVal(int doc) {
-        return Float.toString(floatVal(doc));
-      }
-      @Override
       public String toString(int doc) {
         return "scale(" + vals.toString(doc) + ",toMin=" + min + ",toMax=" + max
                 + ",fromMin=" + minSource
View File

@ -34,28 +34,12 @@ import java.util.Map;
@Override @Override
public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException { public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
final DocValues vals = source.getValues(context, readerContext); final DocValues vals = source.getValues(context, readerContext);
return new DocValues() { return new FloatDocValues(this) {
@Override @Override
public float floatVal(int doc) { public float floatVal(int doc) {
return func(doc, vals); return func(doc, vals);
} }
@Override @Override
public int intVal(int doc) {
return (int)floatVal(doc);
}
@Override
public long longVal(int doc) {
return (long)floatVal(doc);
}
@Override
public double doubleVal(int doc) {
return (double)floatVal(doc);
}
@Override
public String strVal(int doc) {
return Float.toString(floatVal(doc));
}
@Override
public String toString(int doc) { public String toString(int doc) {
return name() + '(' + vals.toString(doc) + ')'; return name() + '(' + vals.toString(doc) + ')';
} }
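After this change, a SimpleFloatFunction subclass is reduced to naming itself and supplying func(); everything else rides on FloatDocValues. A minimal sketch, assuming SingleFunction's (ValueSource) constructor and the protected name()/func() signatures suggested by the hunk above; SqrtFloatFunction here is illustrative, not part of this commit:

    // Hypothetical single-argument function: sqrt(x) over another ValueSource.
    public class SqrtFloatFunction extends SimpleFloatFunction {
      public SqrtFloatFunction(ValueSource source) {
        super(source);
      }

      @Override
      protected String name() {
        return "sqrt";
      }

      @Override
      protected float func(int doc, DocValues vals) {
        return (float) Math.sqrt(vals.floatVal(doc));
      }
    }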

Some files were not shown because too many files have changed in this diff.